blob: 456e7e76080be17551f644b38f834679ee8afd30 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch014dc512016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/arm64/macro-assembler-arm64.h"
Ben Murdoch014dc512016-03-22 12:00:34 +00009#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/compiler/code-generator-impl.h"
11#include "src/compiler/gap-resolver.h"
12#include "src/compiler/node-matchers.h"
Ben Murdoch014dc512016-03-22 12:00:34 +000013#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014
15namespace v8 {
16namespace internal {
17namespace compiler {
18
19#define __ masm()->
20
21
22// Adds Arm64-specific methods to convert InstructionOperands.
class Arm64OperandConverter final : public InstructionOperandConverter {
 public:
  Arm64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Float32 inputs live in the S (low 32-bit) view of the double register.
  DoubleRegister InputFloat32Register(size_t index) {
    return InputDoubleRegister(index).S();
  }

  DoubleRegister InputFloat64Register(size_t index) {
    return InputDoubleRegister(index);
  }

  size_t OutputCount() { return instr_->OutputCount(); }

  DoubleRegister OutputFloat32Register() { return OutputDoubleRegister().S(); }

  DoubleRegister OutputFloat64Register() { return OutputDoubleRegister(); }

  // 32-bit (W) view of the allocated register for the given input.
  Register InputRegister32(size_t index) {
    return ToRegister(instr_->InputAt(index)).W();
  }

  // Like InputRegister32(), but maps an immediate zero onto wzr so the
  // instruction selector may encode 0 without allocating a register.
  Register InputOrZeroRegister32(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt32(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return wzr;
    }
    return InputRegister32(index);
  }

  Register InputRegister64(size_t index) { return InputRegister(index); }

  // 64-bit counterpart of InputOrZeroRegister32(): immediate zero -> xzr.
  Register InputOrZeroRegister64(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt64(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return xzr;
    }
    return InputRegister64(index);
  }

  Operand InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index) {
    return ToOperand(instr_->InputAt(index));
  }

  Operand InputOperand64(size_t index) { return InputOperand(index); }

  Operand InputOperand32(size_t index) {
    return ToOperand32(instr_->InputAt(index));
  }

  Register OutputRegister64() { return OutputRegister(); }

  Register OutputRegister32() { return ToRegister(instr_->Output()).W(); }

  // Decodes the instruction's addressing mode into a 32-bit "operand2"
  // (shifted or extended register) as used by arm64 data-processing
  // instructions. Shift amounts for 32-bit operations are 5-bit.
  Operand InputOperand2_32(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand32(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister32(index), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister32(index), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister32(index), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister32(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister32(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister32(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister32(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        // Memory modes are handled by MemoryOperand(), not operand2.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // 64-bit counterpart of InputOperand2_32(); shift amounts are 6-bit.
  Operand InputOperand2_64(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand64(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister64(index), LSR, InputInt6(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister64(index), ASR, InputInt6(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister64(index), ROR, InputInt6(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister64(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister64(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister64(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister64(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        // Memory modes are handled by MemoryOperand(), not operand2.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Decodes a memory operand starting at *first_index and advances
  // *first_index past the inputs it consumed (base + immediate or
  // base + index register, i.e. two inputs).
  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_R_LSL_I:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_UXTB:
      case kMode_Operand2_R_UXTH:
      case kMode_Operand2_R_SXTB:
      case kMode_Operand2_R_SXTH:
        // Operand2 modes carry no memory operand.
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  MemOperand MemoryOperand(size_t first_index = 0) {
    return MemoryOperand(&first_index);
  }

  // Register operand if allocated to a register, otherwise an immediate.
  Operand ToOperand(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return ToImmediate(op);
  }

  Operand ToOperand32(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op).W());
    }
    return ToImmediate(op);
  }

  // Materializes a Constant as an assembler Operand. Float constants are
  // boxed as tenured heap numbers (no float immediates in Operand here).
  Operand ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kInt64:
        return Operand(constant.ToInt64());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Operand(constant.ToHeapObject());
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm);
  }

  MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    if (offset.from_frame_pointer()) {
      int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset();
      // Convert FP-offsets to SP-offsets if it results in better code.
      if (Assembler::IsImmLSUnscaled(from_sp) ||
          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
        offset = FrameOffset::FromStackPointer(from_sp);
      }
    }
    return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                      offset.offset());
  }
};
227
228
Emily Bernier958fae72015-03-24 16:35:39 -0400229namespace {
230
Ben Murdoch014dc512016-03-22 12:00:34 +0000231class OutOfLineLoadNaN32 final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400232 public:
233 OutOfLineLoadNaN32(CodeGenerator* gen, DoubleRegister result)
234 : OutOfLineCode(gen), result_(result) {}
235
Ben Murdoch014dc512016-03-22 12:00:34 +0000236 void Generate() final {
Emily Bernier958fae72015-03-24 16:35:39 -0400237 __ Fmov(result_, std::numeric_limits<float>::quiet_NaN());
238 }
239
240 private:
241 DoubleRegister const result_;
242};
243
244
Ben Murdoch014dc512016-03-22 12:00:34 +0000245class OutOfLineLoadNaN64 final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400246 public:
247 OutOfLineLoadNaN64(CodeGenerator* gen, DoubleRegister result)
248 : OutOfLineCode(gen), result_(result) {}
249
Ben Murdoch014dc512016-03-22 12:00:34 +0000250 void Generate() final {
Emily Bernier958fae72015-03-24 16:35:39 -0400251 __ Fmov(result_, std::numeric_limits<double>::quiet_NaN());
252 }
253
254 private:
255 DoubleRegister const result_;
256};
257
258
Ben Murdoch014dc512016-03-22 12:00:34 +0000259class OutOfLineLoadZero final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400260 public:
261 OutOfLineLoadZero(CodeGenerator* gen, Register result)
262 : OutOfLineCode(gen), result_(result) {}
263
Ben Murdoch014dc512016-03-22 12:00:34 +0000264 void Generate() final { __ Mov(result_, 0); }
Emily Bernier958fae72015-03-24 16:35:39 -0400265
266 private:
267 Register const result_;
268};
269
Ben Murdoch014dc512016-03-22 12:00:34 +0000270
271class OutOfLineRecordWrite final : public OutOfLineCode {
272 public:
Ben Murdoch109988c2016-05-18 11:27:45 +0100273 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index,
Ben Murdoch014dc512016-03-22 12:00:34 +0000274 Register value, Register scratch0, Register scratch1,
275 RecordWriteMode mode)
276 : OutOfLineCode(gen),
277 object_(object),
278 index_(index),
279 value_(value),
280 scratch0_(scratch0),
281 scratch1_(scratch1),
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100282 mode_(mode),
283 must_save_lr_(!gen->frame_access_state()->has_frame()) {}
Ben Murdoch014dc512016-03-22 12:00:34 +0000284
285 void Generate() final {
286 if (mode_ > RecordWriteMode::kValueIsPointer) {
287 __ JumpIfSmi(value_, exit());
288 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100289 __ CheckPageFlagClear(value_, scratch0_,
290 MemoryChunk::kPointersToHereAreInterestingMask,
291 exit());
292 RememberedSetAction const remembered_set_action =
293 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
294 : OMIT_REMEMBERED_SET;
Ben Murdoch014dc512016-03-22 12:00:34 +0000295 SaveFPRegsMode const save_fp_mode =
296 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100297 if (must_save_lr_) {
Ben Murdoch109988c2016-05-18 11:27:45 +0100298 // We need to save and restore lr if the frame was elided.
299 __ Push(lr);
300 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000301 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
Ben Murdoch109988c2016-05-18 11:27:45 +0100302 remembered_set_action, save_fp_mode);
Ben Murdoch014dc512016-03-22 12:00:34 +0000303 __ Add(scratch1_, object_, index_);
304 __ CallStub(&stub);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100305 if (must_save_lr_) {
Ben Murdoch109988c2016-05-18 11:27:45 +0100306 __ Pop(lr);
307 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000308 }
309
310 private:
311 Register const object_;
Ben Murdoch109988c2016-05-18 11:27:45 +0100312 Operand const index_;
Ben Murdoch014dc512016-03-22 12:00:34 +0000313 Register const value_;
314 Register const scratch0_;
315 Register const scratch1_;
316 RecordWriteMode const mode_;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100317 bool must_save_lr_;
Ben Murdoch014dc512016-03-22 12:00:34 +0000318};
319
320
// Maps a platform-independent FlagsCondition onto the arm64 condition code
// that tests it after the corresponding compare instruction.
Condition FlagsConditionToCondition(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    // Float conditions: the codes below are chosen for the NZCV flags as
    // set by Fcmp, where the "OrUnordered" variants must also be taken
    // when either operand is NaN — NOTE(review): mapping assumed to match
    // Fcmp's unordered flag encoding; confirm against the ARM ARM.
    case kFloatLessThanOrUnordered:
      return lt;
    case kFloatGreaterThanOrEqual:
      return ge;
    case kFloatLessThanOrEqual:
      return ls;
    case kFloatGreaterThanOrUnordered:
      return hi;
    case kFloatLessThan:
      return lo;
    case kFloatGreaterThanOrEqualOrUnordered:
      return hs;
    case kFloatLessThanOrEqualOrUnordered:
      return le;
    case kFloatGreaterThan:
      return gt;
    case kOverflow:
      return vs;
    case kNotOverflow:
      return vc;
    // No single arm64 condition code expresses these; callers must lower
    // them differently.
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      break;
  }
  UNREACHABLE();
  return nv;
}
370
Emily Bernier958fae72015-03-24 16:35:39 -0400371} // namespace
372
373
// Checked float{32,64} load: compares the unsigned offset against length and
// branches to an out-of-line stub that yields a quiet NaN when out of bounds.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                         \
  do {                                                             \
    auto result = i.OutputFloat##width##Register();                \
    auto buffer = i.InputRegister(0);                              \
    auto offset = i.InputRegister32(1);                            \
    auto length = i.InputOperand32(2);                             \
    __ Cmp(offset, length);                                        \
    auto ool = new (zone()) OutOfLineLoadNaN##width(this, result); \
    __ B(hs, ool->entry());                                        \
    __ Ldr(result, MemOperand(buffer, offset, UXTW));              \
    __ Bind(ool->exit());                                          \
  } while (0)
386
387
// Checked 32-bit integer load: out-of-bounds offsets branch to a stub that
// zeroes the result instead of loading.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)             \
  do {                                                       \
    auto result = i.OutputRegister32();                      \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    __ Cmp(offset, length);                                  \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                                  \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)
400
401
// Checked 64-bit integer load: like ASSEMBLE_CHECKED_LOAD_INTEGER but with a
// 64-bit (X) result register.
#define ASSEMBLE_CHECKED_LOAD_INTEGER_64(asm_instr)          \
  do {                                                       \
    auto result = i.OutputRegister();                        \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    __ Cmp(offset, length);                                  \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                                  \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)
414
415
// Checked float{32,64} store: out-of-bounds offsets skip the store entirely
// (no out-of-line code needed).
#define ASSEMBLE_CHECKED_STORE_FLOAT(width)          \
  do {                                               \
    auto buffer = i.InputRegister(0);                \
    auto offset = i.InputRegister32(1);              \
    auto length = i.InputOperand32(2);               \
    auto value = i.InputFloat##width##Register(3);   \
    __ Cmp(offset, length);                          \
    Label done;                                      \
    __ B(hs, &done);                                 \
    __ Str(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                  \
  } while (0)
428
429
// Checked 32-bit integer store: out-of-bounds offsets skip the store.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)          \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputRegister32(3);                     \
    __ Cmp(offset, length);                                \
    Label done;                                            \
    __ B(hs, &done);                                       \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)
442
443
// Checked 64-bit integer store: like ASSEMBLE_CHECKED_STORE_INTEGER but with
// a 64-bit (X) value register.
#define ASSEMBLE_CHECKED_STORE_INTEGER_64(asm_instr)       \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputRegister(3);                       \
    __ Cmp(offset, length);                                \
    Label done;                                            \
    __ B(hs, &done);                                       \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000456
457
// Emits a shift with either a register or an immediate shift amount; the
// immediate is reduced modulo the operand width (32 or 64), matching the
// hardware's treatment of register shift amounts.
#define ASSEMBLE_SHIFT(asm_instr, width)                                    \
  do {                                                                      \
    if (instr->InputAt(1)->IsRegister()) {                                  \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   i.InputRegister##width(1));                              \
    } else {                                                                \
      uint32_t imm =                                                        \
          static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   imm % (width));                                          \
    }                                                                       \
  } while (0)
470
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100471void CodeGenerator::AssembleDeconstructFrame() {
472 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
473 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) {
474 __ Mov(csp, fp);
475 } else {
476 __ Mov(jssp, fp);
477 }
478 __ Pop(fp, lr);
479}
Ben Murdoch014dc512016-03-22 12:00:34 +0000480
// Drops any outgoing stack parameters of the current activation before a
// tail call and resets frame access back to FP-relative addressing.
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ Drop(sp_slot_delta);
  }
  frame_access_state()->SetFrameAccessToDefault();
}
488
489
// Prepares the stack for a tail call: claims extra slots if the callee
// needs more stack parameters than the caller provided, restores the
// caller's lr/fp when a frame exists, and switches subsequent slot
// accesses to be SP-relative.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    // Callee needs more argument slots than the caller passed: grow the
    // stack by the (negative) delta.
    __ Claim(-sp_slot_delta);
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  frame_access_state()->SetFrameAccessToSP();
}
502
// Removes an arguments adaptor frame, if one is on top, before a tail call
// from a JS function: detects the adaptor via its context-slot marker,
// loads the actual caller-supplied argument count, and lets
// PrepareForTailCall shuffle the frame accordingly. No-op otherwise.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ Ldr(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ Cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
Ben Murdoch014dc512016-03-22 12:00:34 +0000527
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000528// Assembles an instruction after register allocation, producing machine code.
529void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
530 Arm64OperandConverter i(this, instr);
531 InstructionCode opcode = instr->opcode();
Ben Murdoch109988c2016-05-18 11:27:45 +0100532 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
533 switch (arch_opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000534 case kArchCallCodeObject: {
535 EnsureSpaceForLazyDeopt();
536 if (instr->InputAt(0)->IsImmediate()) {
537 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
538 RelocInfo::CODE_TARGET);
539 } else {
540 Register target = i.InputRegister(0);
541 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
542 __ Call(target);
543 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100544 RecordCallPosition(instr);
Ben Murdoch109988c2016-05-18 11:27:45 +0100545 // TODO(titzer): this is ugly. JSSP should be a caller-save register
546 // in this case, but it is not possible to express in the register
547 // allocator.
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100548 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch109988c2016-05-18 11:27:45 +0100549 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100550 __ Ldr(jssp, MemOperand(csp));
551 __ Mov(csp, jssp);
552 }
553 if (flags & CallDescriptor::kRestoreCSP) {
554 __ Mov(csp, jssp);
555 __ AssertCspAligned();
Ben Murdoch109988c2016-05-18 11:27:45 +0100556 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000557 frame_access_state()->ClearSPDelta();
Ben Murdoch014dc512016-03-22 12:00:34 +0000558 break;
559 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100560 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch014dc512016-03-22 12:00:34 +0000561 case kArchTailCallCodeObject: {
562 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
563 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100564 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
565 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
566 i.TempRegister(0), i.TempRegister(1),
567 i.TempRegister(2));
568 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000569 if (instr->InputAt(0)->IsImmediate()) {
570 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
571 RelocInfo::CODE_TARGET);
572 } else {
573 Register target = i.InputRegister(0);
574 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
575 __ Jump(target);
576 }
577 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000578 break;
579 }
580 case kArchCallJSFunction: {
581 EnsureSpaceForLazyDeopt();
582 Register func = i.InputRegister(0);
583 if (FLAG_debug_code) {
584 // Check the function's context matches the context argument.
585 UseScratchRegisterScope scope(masm());
586 Register temp = scope.AcquireX();
587 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
588 __ cmp(cp, temp);
589 __ Assert(eq, kWrongFunctionContext);
590 }
591 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
592 __ Call(x10);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100593 RecordCallPosition(instr);
Ben Murdoch109988c2016-05-18 11:27:45 +0100594 // TODO(titzer): this is ugly. JSSP should be a caller-save register
595 // in this case, but it is not possible to express in the register
596 // allocator.
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100597 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch109988c2016-05-18 11:27:45 +0100598 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100599 __ Ldr(jssp, MemOperand(csp));
600 __ Mov(csp, jssp);
601 }
602 if (flags & CallDescriptor::kRestoreCSP) {
603 __ Mov(csp, jssp);
604 __ AssertCspAligned();
Ben Murdoch109988c2016-05-18 11:27:45 +0100605 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000606 frame_access_state()->ClearSPDelta();
Ben Murdoch014dc512016-03-22 12:00:34 +0000607 break;
608 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100609 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch014dc512016-03-22 12:00:34 +0000610 case kArchTailCallJSFunction: {
611 Register func = i.InputRegister(0);
612 if (FLAG_debug_code) {
613 // Check the function's context matches the context argument.
614 UseScratchRegisterScope scope(masm());
615 Register temp = scope.AcquireX();
616 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
617 __ cmp(cp, temp);
618 __ Assert(eq, kWrongFunctionContext);
619 }
620 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
621 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100622 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
623 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
624 i.TempRegister(0), i.TempRegister(1),
625 i.TempRegister(2));
626 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000627 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
628 __ Jump(x10);
629 frame_access_state()->ClearSPDelta();
630 break;
631 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000632 case kArchPrepareCallCFunction:
633 // We don't need kArchPrepareCallCFunction on arm64 as the instruction
634 // selector already perform a Claim to reserve space on the stack and
635 // guarantee correct alignment of stack pointer.
636 UNREACHABLE();
637 break;
638 case kArchPrepareTailCall:
639 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
640 break;
641 case kArchCallCFunction: {
642 int const num_parameters = MiscField::decode(instr->opcode());
643 if (instr->InputAt(0)->IsImmediate()) {
644 ExternalReference ref = i.InputExternalReference(0);
645 __ CallCFunction(ref, num_parameters, 0);
646 } else {
647 Register func = i.InputRegister(0);
648 __ CallCFunction(func, num_parameters, 0);
649 }
650 // CallCFunction only supports register arguments so we never need to call
651 // frame()->ClearOutgoingParameterSlots() here.
652 DCHECK(frame_access_state()->sp_delta() == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000653 break;
654 }
655 case kArchJmp:
Emily Bernier958fae72015-03-24 16:35:39 -0400656 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000657 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000658 case kArchTableSwitch:
659 AssembleArchTableSwitch(instr);
660 break;
661 case kArchLookupSwitch:
662 AssembleArchLookupSwitch(instr);
663 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000664 case kArchNop:
Ben Murdoch014dc512016-03-22 12:00:34 +0000665 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000666 // don't emit code for nops.
667 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000668 case kArchDeoptimize: {
669 int deopt_state_id =
670 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
671 Deoptimizer::BailoutType bailout_type =
672 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
673 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
674 break;
675 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000676 case kArchRet:
677 AssembleReturn();
678 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400679 case kArchStackPointer:
680 __ mov(i.OutputRegister(), masm()->StackPointer());
681 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000682 case kArchFramePointer:
683 __ mov(i.OutputRegister(), fp);
684 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100685 case kArchParentFramePointer:
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100686 if (frame_access_state()->has_frame()) {
Ben Murdoch109988c2016-05-18 11:27:45 +0100687 __ ldr(i.OutputRegister(), MemOperand(fp, 0));
688 } else {
689 __ mov(i.OutputRegister(), fp);
690 }
691 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000692 case kArchTruncateDoubleToI:
693 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
694 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000695 case kArchStoreWithWriteBarrier: {
696 RecordWriteMode mode =
697 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
Ben Murdoch109988c2016-05-18 11:27:45 +0100698 AddressingMode addressing_mode =
699 AddressingModeField::decode(instr->opcode());
Ben Murdoch014dc512016-03-22 12:00:34 +0000700 Register object = i.InputRegister(0);
Ben Murdoch109988c2016-05-18 11:27:45 +0100701 Operand index(0);
702 if (addressing_mode == kMode_MRI) {
703 index = Operand(i.InputInt64(1));
704 } else {
705 DCHECK_EQ(addressing_mode, kMode_MRR);
706 index = Operand(i.InputRegister(1));
707 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000708 Register value = i.InputRegister(2);
709 Register scratch0 = i.TempRegister(0);
710 Register scratch1 = i.TempRegister(1);
711 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
712 scratch0, scratch1, mode);
713 __ Str(value, MemOperand(object, index));
714 __ CheckPageFlagSet(object, scratch0,
715 MemoryChunk::kPointersFromHereAreInterestingMask,
716 ool->entry());
717 __ Bind(ool->exit());
Emily Bernier958fae72015-03-24 16:35:39 -0400718 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000719 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100720 case kArchStackSlot: {
721 FrameOffset offset =
722 frame_access_state()->GetFrameOffset(i.InputInt32(0));
723 Register base;
724 if (offset.from_stack_pointer()) {
725 base = __ StackPointer();
726 } else {
727 base = fp;
728 }
729 __ Add(i.OutputRegister(0), base, Operand(offset.offset()));
730 break;
731 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000732 case kArm64Float32RoundDown:
733 __ Frintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
734 break;
735 case kArm64Float64RoundDown:
Emily Bernier958fae72015-03-24 16:35:39 -0400736 __ Frintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
737 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000738 case kArm64Float32RoundUp:
739 __ Frintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
740 break;
741 case kArm64Float64RoundUp:
742 __ Frintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
Emily Bernier958fae72015-03-24 16:35:39 -0400743 break;
744 case kArm64Float64RoundTiesAway:
745 __ Frinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
746 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000747 case kArm64Float32RoundTruncate:
748 __ Frintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
749 break;
750 case kArm64Float64RoundTruncate:
751 __ Frintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
752 break;
753 case kArm64Float32RoundTiesEven:
754 __ Frintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
755 break;
756 case kArm64Float64RoundTiesEven:
757 __ Frintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
758 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000759 case kArm64Add:
Ben Murdoch014dc512016-03-22 12:00:34 +0000760 if (FlagsModeField::decode(opcode) != kFlags_none) {
761 __ Adds(i.OutputRegister(), i.InputOrZeroRegister64(0),
762 i.InputOperand2_64(1));
763 } else {
764 __ Add(i.OutputRegister(), i.InputOrZeroRegister64(0),
765 i.InputOperand2_64(1));
766 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000767 break;
768 case kArm64Add32:
769 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000770 __ Adds(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400771 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000772 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000773 __ Add(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400774 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000775 }
776 break;
777 case kArm64And:
Ben Murdoch014dc512016-03-22 12:00:34 +0000778 __ And(i.OutputRegister(), i.InputOrZeroRegister64(0),
779 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000780 break;
781 case kArm64And32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000782 __ And(i.OutputRegister32(), i.InputOrZeroRegister32(0),
783 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400784 break;
785 case kArm64Bic:
Ben Murdoch014dc512016-03-22 12:00:34 +0000786 __ Bic(i.OutputRegister(), i.InputOrZeroRegister64(0),
787 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400788 break;
789 case kArm64Bic32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000790 __ Bic(i.OutputRegister32(), i.InputOrZeroRegister32(0),
791 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000792 break;
793 case kArm64Mul:
794 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
795 break;
796 case kArm64Mul32:
797 __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
798 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400799 case kArm64Smull:
800 __ Smull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
801 break;
802 case kArm64Umull:
803 __ Umull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
804 break;
805 case kArm64Madd:
806 __ Madd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
807 i.InputRegister(2));
808 break;
809 case kArm64Madd32:
810 __ Madd(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
811 i.InputRegister32(2));
812 break;
813 case kArm64Msub:
814 __ Msub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
815 i.InputRegister(2));
816 break;
817 case kArm64Msub32:
818 __ Msub(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
819 i.InputRegister32(2));
820 break;
821 case kArm64Mneg:
822 __ Mneg(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
823 break;
824 case kArm64Mneg32:
825 __ Mneg(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
826 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000827 case kArm64Idiv:
828 __ Sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
829 break;
830 case kArm64Idiv32:
831 __ Sdiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
832 break;
833 case kArm64Udiv:
834 __ Udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
835 break;
836 case kArm64Udiv32:
837 __ Udiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
838 break;
839 case kArm64Imod: {
840 UseScratchRegisterScope scope(masm());
841 Register temp = scope.AcquireX();
842 __ Sdiv(temp, i.InputRegister(0), i.InputRegister(1));
843 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
844 break;
845 }
846 case kArm64Imod32: {
847 UseScratchRegisterScope scope(masm());
848 Register temp = scope.AcquireW();
849 __ Sdiv(temp, i.InputRegister32(0), i.InputRegister32(1));
850 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
851 i.InputRegister32(0));
852 break;
853 }
854 case kArm64Umod: {
855 UseScratchRegisterScope scope(masm());
856 Register temp = scope.AcquireX();
857 __ Udiv(temp, i.InputRegister(0), i.InputRegister(1));
858 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
859 break;
860 }
861 case kArm64Umod32: {
862 UseScratchRegisterScope scope(masm());
863 Register temp = scope.AcquireW();
864 __ Udiv(temp, i.InputRegister32(0), i.InputRegister32(1));
865 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
866 i.InputRegister32(0));
867 break;
868 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000869 case kArm64Not:
Ben Murdoch014dc512016-03-22 12:00:34 +0000870 __ Mvn(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000871 break;
872 case kArm64Not32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000873 __ Mvn(i.OutputRegister32(), i.InputOperand32(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000874 break;
875 case kArm64Or:
Ben Murdoch014dc512016-03-22 12:00:34 +0000876 __ Orr(i.OutputRegister(), i.InputOrZeroRegister64(0),
877 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000878 break;
879 case kArm64Or32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000880 __ Orr(i.OutputRegister32(), i.InputOrZeroRegister32(0),
881 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000882 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400883 case kArm64Orn:
Ben Murdoch014dc512016-03-22 12:00:34 +0000884 __ Orn(i.OutputRegister(), i.InputOrZeroRegister64(0),
885 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000886 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400887 case kArm64Orn32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000888 __ Orn(i.OutputRegister32(), i.InputOrZeroRegister32(0),
889 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400890 break;
891 case kArm64Eor:
Ben Murdoch014dc512016-03-22 12:00:34 +0000892 __ Eor(i.OutputRegister(), i.InputOrZeroRegister64(0),
893 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400894 break;
895 case kArm64Eor32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000896 __ Eor(i.OutputRegister32(), i.InputOrZeroRegister32(0),
897 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400898 break;
899 case kArm64Eon:
Ben Murdoch014dc512016-03-22 12:00:34 +0000900 __ Eon(i.OutputRegister(), i.InputOrZeroRegister64(0),
901 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400902 break;
903 case kArm64Eon32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000904 __ Eon(i.OutputRegister32(), i.InputOrZeroRegister32(0),
905 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000906 break;
907 case kArm64Sub:
Ben Murdoch014dc512016-03-22 12:00:34 +0000908 if (FlagsModeField::decode(opcode) != kFlags_none) {
909 __ Subs(i.OutputRegister(), i.InputOrZeroRegister64(0),
910 i.InputOperand2_64(1));
911 } else {
912 __ Sub(i.OutputRegister(), i.InputOrZeroRegister64(0),
913 i.InputOperand2_64(1));
914 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000915 break;
916 case kArm64Sub32:
917 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000918 __ Subs(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400919 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000920 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000921 __ Sub(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400922 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000923 }
924 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400925 case kArm64Lsl:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000926 ASSEMBLE_SHIFT(Lsl, 64);
927 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400928 case kArm64Lsl32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 ASSEMBLE_SHIFT(Lsl, 32);
930 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400931 case kArm64Lsr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000932 ASSEMBLE_SHIFT(Lsr, 64);
933 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400934 case kArm64Lsr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000935 ASSEMBLE_SHIFT(Lsr, 32);
936 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400937 case kArm64Asr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000938 ASSEMBLE_SHIFT(Asr, 64);
939 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400940 case kArm64Asr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000941 ASSEMBLE_SHIFT(Asr, 32);
942 break;
943 case kArm64Ror:
944 ASSEMBLE_SHIFT(Ror, 64);
945 break;
946 case kArm64Ror32:
947 ASSEMBLE_SHIFT(Ror, 32);
948 break;
949 case kArm64Mov32:
950 __ Mov(i.OutputRegister32(), i.InputRegister32(0));
951 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400952 case kArm64Sxtb32:
953 __ Sxtb(i.OutputRegister32(), i.InputRegister32(0));
954 break;
955 case kArm64Sxth32:
956 __ Sxth(i.OutputRegister32(), i.InputRegister32(0));
957 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000958 case kArm64Sxtw:
959 __ Sxtw(i.OutputRegister(), i.InputRegister32(0));
960 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000961 case kArm64Sbfx32:
962 __ Sbfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
963 i.InputInt5(2));
964 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400965 case kArm64Ubfx:
Ben Murdoch014dc512016-03-22 12:00:34 +0000966 __ Ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt6(1),
967 i.InputInt6(2));
Emily Bernier958fae72015-03-24 16:35:39 -0400968 break;
969 case kArm64Ubfx32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000970 __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
971 i.InputInt5(2));
972 break;
973 case kArm64Ubfiz32:
974 __ Ubfiz(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
975 i.InputInt5(2));
976 break;
977 case kArm64Bfi:
978 __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
979 i.InputInt6(3));
Emily Bernier958fae72015-03-24 16:35:39 -0400980 break;
981 case kArm64TestAndBranch32:
982 case kArm64TestAndBranch:
983 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
984 break;
985 case kArm64CompareAndBranch32:
986 // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
987 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100988 case kArm64ClaimCSP: {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100989 int count = RoundUp(i.InputInt32(0), 2);
Ben Murdoch109988c2016-05-18 11:27:45 +0100990 Register prev = __ StackPointer();
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100991 if (prev.Is(jssp)) {
992 // TODO(titzer): make this a macro-assembler method.
993 // Align the CSP and store the previous JSSP on the stack.
994 UseScratchRegisterScope scope(masm());
995 Register tmp = scope.AcquireX();
996
997 int sp_alignment = __ ActivationFrameAlignment();
998 __ Sub(tmp, jssp, kPointerSize);
999 __ And(tmp, tmp, Operand(~static_cast<uint64_t>(sp_alignment - 1)));
1000 __ Mov(csp, tmp);
1001 __ Str(jssp, MemOperand(csp));
1002 if (count > 0) {
1003 __ SetStackPointer(csp);
1004 __ Claim(count);
1005 __ SetStackPointer(prev);
1006 }
1007 } else {
1008 __ AssertCspAligned();
1009 if (count > 0) {
1010 __ Claim(count);
1011 frame_access_state()->IncreaseSPDelta(count);
1012 }
1013 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001014 break;
1015 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001016 case kArm64ClaimJSSP: {
1017 int count = i.InputInt32(0);
1018 if (csp.Is(__ StackPointer())) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001019 // No JSSP is set up. Compute it from the CSP.
1020 __ AssertCspAligned();
1021 if (count > 0) {
1022 int even = RoundUp(count, 2);
1023 __ Sub(jssp, csp, count * kPointerSize);
1024 __ Sub(csp, csp, even * kPointerSize); // Must always be aligned.
1025 frame_access_state()->IncreaseSPDelta(even);
1026 } else {
1027 __ Mov(jssp, csp);
1028 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001029 } else {
1030 // JSSP is the current stack pointer, just use regular Claim().
1031 __ Claim(count);
1032 frame_access_state()->IncreaseSPDelta(count);
1033 }
1034 break;
1035 }
1036 case kArm64PokeCSP: // fall through
1037 case kArm64PokeJSSP: {
1038 Register prev = __ StackPointer();
1039 __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
Ben Murdoch014dc512016-03-22 12:00:34 +00001040 Operand operand(i.InputInt32(1) * kPointerSize);
1041 if (instr->InputAt(0)->IsDoubleRegister()) {
1042 __ Poke(i.InputFloat64Register(0), operand);
1043 } else {
1044 __ Poke(i.InputRegister(0), operand);
1045 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001046 __ SetStackPointer(prev);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001047 break;
1048 }
1049 case kArm64PokePair: {
Ben Murdoch014dc512016-03-22 12:00:34 +00001050 int slot = i.InputInt32(2) - 1;
1051 if (instr->InputAt(0)->IsDoubleRegister()) {
1052 __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
1053 slot * kPointerSize);
1054 } else {
1055 __ PokePair(i.InputRegister(1), i.InputRegister(0),
1056 slot * kPointerSize);
1057 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001058 break;
1059 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001060 case kArm64Clz:
1061 __ Clz(i.OutputRegister64(), i.InputRegister64(0));
1062 break;
1063 case kArm64Clz32:
1064 __ Clz(i.OutputRegister32(), i.InputRegister32(0));
1065 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001066 case kArm64Rbit:
1067 __ Rbit(i.OutputRegister64(), i.InputRegister64(0));
1068 break;
1069 case kArm64Rbit32:
1070 __ Rbit(i.OutputRegister32(), i.InputRegister32(0));
1071 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001072 case kArm64Cmp:
Ben Murdoch014dc512016-03-22 12:00:34 +00001073 __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001074 break;
1075 case kArm64Cmp32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001076 __ Cmp(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001077 break;
1078 case kArm64Cmn:
Ben Murdoch014dc512016-03-22 12:00:34 +00001079 __ Cmn(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001080 break;
1081 case kArm64Cmn32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001082 __ Cmn(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001083 break;
1084 case kArm64Tst:
1085 __ Tst(i.InputRegister(0), i.InputOperand(1));
1086 break;
1087 case kArm64Tst32:
1088 __ Tst(i.InputRegister32(0), i.InputOperand32(1));
1089 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001090 case kArm64Float32Cmp:
1091 if (instr->InputAt(1)->IsDoubleRegister()) {
1092 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1093 } else {
1094 DCHECK(instr->InputAt(1)->IsImmediate());
1095 // 0.0 is the only immediate supported by fcmp instructions.
1096 DCHECK(i.InputFloat32(1) == 0.0f);
1097 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32(1));
1098 }
1099 break;
1100 case kArm64Float32Add:
1101 __ Fadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
1102 i.InputFloat32Register(1));
1103 break;
1104 case kArm64Float32Sub:
1105 __ Fsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
1106 i.InputFloat32Register(1));
1107 break;
1108 case kArm64Float32Mul:
1109 __ Fmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
1110 i.InputFloat32Register(1));
1111 break;
1112 case kArm64Float32Div:
1113 __ Fdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
1114 i.InputFloat32Register(1));
1115 break;
1116 case kArm64Float32Max:
1117 // (b < a) ? a : b
1118 __ Fcmp(i.InputFloat32Register(1), i.InputFloat32Register(0));
1119 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1120 i.InputFloat32Register(1), lo);
1121 break;
1122 case kArm64Float32Min:
1123 // (a < b) ? a : b
1124 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1125 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1126 i.InputFloat32Register(1), lo);
1127 break;
1128 case kArm64Float32Abs:
1129 __ Fabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
1130 break;
1131 case kArm64Float32Sqrt:
1132 __ Fsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
1133 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001134 case kArm64Float64Cmp:
Ben Murdoch014dc512016-03-22 12:00:34 +00001135 if (instr->InputAt(1)->IsDoubleRegister()) {
1136 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1137 } else {
1138 DCHECK(instr->InputAt(1)->IsImmediate());
1139 // 0.0 is the only immediate supported by fcmp instructions.
1140 DCHECK(i.InputDouble(1) == 0.0);
1141 __ Fcmp(i.InputDoubleRegister(0), i.InputDouble(1));
1142 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001143 break;
1144 case kArm64Float64Add:
1145 __ Fadd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1146 i.InputDoubleRegister(1));
1147 break;
1148 case kArm64Float64Sub:
1149 __ Fsub(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1150 i.InputDoubleRegister(1));
1151 break;
1152 case kArm64Float64Mul:
1153 __ Fmul(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1154 i.InputDoubleRegister(1));
1155 break;
1156 case kArm64Float64Div:
1157 __ Fdiv(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1158 i.InputDoubleRegister(1));
1159 break;
1160 case kArm64Float64Mod: {
1161 // TODO(dcarney): implement directly. See note in lithium-codegen-arm64.cc
1162 FrameScope scope(masm(), StackFrame::MANUAL);
1163 DCHECK(d0.is(i.InputDoubleRegister(0)));
1164 DCHECK(d1.is(i.InputDoubleRegister(1)));
1165 DCHECK(d0.is(i.OutputDoubleRegister()));
1166 // TODO(dcarney): make sure this saves all relevant registers.
1167 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1168 0, 2);
1169 break;
1170 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001171 case kArm64Float64Max:
1172 // (b < a) ? a : b
1173 __ Fcmp(i.InputDoubleRegister(1), i.InputDoubleRegister(0));
1174 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1175 i.InputDoubleRegister(1), lo);
1176 break;
1177 case kArm64Float64Min:
1178 // (a < b) ? a : b
1179 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1180 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1181 i.InputDoubleRegister(1), lo);
1182 break;
1183 case kArm64Float64Abs:
1184 __ Fabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1185 break;
1186 case kArm64Float64Neg:
1187 __ Fneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1188 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001189 case kArm64Float64Sqrt:
1190 __ Fsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1191 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001192 case kArm64Float32ToFloat64:
1193 __ Fcvt(i.OutputDoubleRegister(), i.InputDoubleRegister(0).S());
1194 break;
1195 case kArm64Float64ToFloat32:
1196 __ Fcvt(i.OutputDoubleRegister().S(), i.InputDoubleRegister(0));
1197 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001198 case kArm64Float32ToInt32:
1199 __ Fcvtzs(i.OutputRegister32(), i.InputFloat32Register(0));
1200 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001201 case kArm64Float64ToInt32:
1202 __ Fcvtzs(i.OutputRegister32(), i.InputDoubleRegister(0));
1203 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001204 case kArm64Float32ToUint32:
1205 __ Fcvtzu(i.OutputRegister32(), i.InputFloat32Register(0));
1206 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001207 case kArm64Float64ToUint32:
1208 __ Fcvtzu(i.OutputRegister32(), i.InputDoubleRegister(0));
1209 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001210 case kArm64Float32ToInt64:
1211 __ Fcvtzs(i.OutputRegister64(), i.InputFloat32Register(0));
1212 if (i.OutputCount() > 1) {
1213 __ Mov(i.OutputRegister(1), 1);
1214 Label done;
1215 __ Cmp(i.OutputRegister(0), 1);
1216 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1217 __ Fccmp(i.InputFloat32Register(0), i.InputFloat32Register(0), VFlag,
1218 vc);
1219 __ B(vc, &done);
1220 __ Fcmp(i.InputFloat32Register(0), static_cast<float>(INT64_MIN));
1221 __ Cset(i.OutputRegister(1), eq);
1222 __ Bind(&done);
1223 }
1224 break;
1225 case kArm64Float64ToInt64:
1226 __ Fcvtzs(i.OutputRegister(0), i.InputDoubleRegister(0));
1227 if (i.OutputCount() > 1) {
1228 __ Mov(i.OutputRegister(1), 1);
1229 Label done;
1230 __ Cmp(i.OutputRegister(0), 1);
1231 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1232 __ Fccmp(i.InputDoubleRegister(0), i.InputDoubleRegister(0), VFlag, vc);
1233 __ B(vc, &done);
1234 __ Fcmp(i.InputDoubleRegister(0), static_cast<double>(INT64_MIN));
1235 __ Cset(i.OutputRegister(1), eq);
1236 __ Bind(&done);
1237 }
1238 break;
1239 case kArm64Float32ToUint64:
1240 __ Fcvtzu(i.OutputRegister64(), i.InputFloat32Register(0));
1241 if (i.OutputCount() > 1) {
1242 __ Fcmp(i.InputFloat32Register(0), -1.0);
1243 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1244 __ Cset(i.OutputRegister(1), ne);
1245 }
1246 break;
1247 case kArm64Float64ToUint64:
1248 __ Fcvtzu(i.OutputRegister64(), i.InputDoubleRegister(0));
1249 if (i.OutputCount() > 1) {
1250 __ Fcmp(i.InputDoubleRegister(0), -1.0);
1251 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1252 __ Cset(i.OutputRegister(1), ne);
1253 }
1254 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001255 case kArm64Int32ToFloat32:
1256 __ Scvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1257 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001258 case kArm64Int32ToFloat64:
1259 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1260 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001261 case kArm64Int64ToFloat32:
1262 __ Scvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1263 break;
1264 case kArm64Int64ToFloat64:
1265 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1266 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001267 case kArm64Uint32ToFloat32:
1268 __ Ucvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1269 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001270 case kArm64Uint32ToFloat64:
1271 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1272 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001273 case kArm64Uint64ToFloat32:
1274 __ Ucvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1275 break;
1276 case kArm64Uint64ToFloat64:
1277 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1278 break;
1279 case kArm64Float64ExtractLowWord32:
1280 __ Fmov(i.OutputRegister32(), i.InputFloat32Register(0));
1281 break;
1282 case kArm64Float64ExtractHighWord32:
1283 // TODO(arm64): This should use MOV (to general) when NEON is supported.
1284 __ Fmov(i.OutputRegister(), i.InputFloat64Register(0));
1285 __ Lsr(i.OutputRegister(), i.OutputRegister(), 32);
1286 break;
1287 case kArm64Float64InsertLowWord32: {
1288 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1289 UseScratchRegisterScope scope(masm());
1290 Register tmp = scope.AcquireX();
1291 __ Fmov(tmp, i.InputFloat64Register(0));
1292 __ Bfi(tmp, i.InputRegister(1), 0, 32);
1293 __ Fmov(i.OutputFloat64Register(), tmp);
1294 break;
1295 }
1296 case kArm64Float64InsertHighWord32: {
1297 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1298 UseScratchRegisterScope scope(masm());
1299 Register tmp = scope.AcquireX();
1300 __ Fmov(tmp.W(), i.InputFloat32Register(0));
1301 __ Bfi(tmp, i.InputRegister(1), 32, 32);
1302 __ Fmov(i.OutputFloat64Register(), tmp);
1303 break;
1304 }
1305 case kArm64Float64MoveU64:
1306 __ Fmov(i.OutputFloat64Register(), i.InputRegister(0));
1307 break;
1308 case kArm64U64MoveFloat64:
1309 __ Fmov(i.OutputRegister(), i.InputDoubleRegister(0));
1310 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001311 case kArm64Ldrb:
1312 __ Ldrb(i.OutputRegister(), i.MemoryOperand());
1313 break;
1314 case kArm64Ldrsb:
1315 __ Ldrsb(i.OutputRegister(), i.MemoryOperand());
1316 break;
1317 case kArm64Strb:
1318 __ Strb(i.InputRegister(2), i.MemoryOperand());
1319 break;
1320 case kArm64Ldrh:
1321 __ Ldrh(i.OutputRegister(), i.MemoryOperand());
1322 break;
1323 case kArm64Ldrsh:
1324 __ Ldrsh(i.OutputRegister(), i.MemoryOperand());
1325 break;
1326 case kArm64Strh:
1327 __ Strh(i.InputRegister(2), i.MemoryOperand());
1328 break;
1329 case kArm64LdrW:
1330 __ Ldr(i.OutputRegister32(), i.MemoryOperand());
1331 break;
1332 case kArm64StrW:
1333 __ Str(i.InputRegister32(2), i.MemoryOperand());
1334 break;
1335 case kArm64Ldr:
1336 __ Ldr(i.OutputRegister(), i.MemoryOperand());
1337 break;
1338 case kArm64Str:
1339 __ Str(i.InputRegister(2), i.MemoryOperand());
1340 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001341 case kArm64LdrS:
1342 __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001343 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001344 case kArm64StrS:
1345 __ Str(i.InputDoubleRegister(2).S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001346 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001347 case kArm64LdrD:
1348 __ Ldr(i.OutputDoubleRegister(), i.MemoryOperand());
1349 break;
1350 case kArm64StrD:
1351 __ Str(i.InputDoubleRegister(2), i.MemoryOperand());
1352 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001353 case kCheckedLoadInt8:
1354 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsb);
1355 break;
1356 case kCheckedLoadUint8:
1357 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrb);
1358 break;
1359 case kCheckedLoadInt16:
1360 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsh);
1361 break;
1362 case kCheckedLoadUint16:
1363 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrh);
1364 break;
1365 case kCheckedLoadWord32:
1366 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldr);
1367 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001368 case kCheckedLoadWord64:
1369 ASSEMBLE_CHECKED_LOAD_INTEGER_64(Ldr);
1370 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001371 case kCheckedLoadFloat32:
1372 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
1373 break;
1374 case kCheckedLoadFloat64:
1375 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
1376 break;
1377 case kCheckedStoreWord8:
1378 ASSEMBLE_CHECKED_STORE_INTEGER(Strb);
1379 break;
1380 case kCheckedStoreWord16:
1381 ASSEMBLE_CHECKED_STORE_INTEGER(Strh);
1382 break;
1383 case kCheckedStoreWord32:
1384 ASSEMBLE_CHECKED_STORE_INTEGER(Str);
1385 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001386 case kCheckedStoreWord64:
1387 ASSEMBLE_CHECKED_STORE_INTEGER_64(Str);
1388 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001389 case kCheckedStoreFloat32:
1390 ASSEMBLE_CHECKED_STORE_FLOAT(32);
1391 break;
1392 case kCheckedStoreFloat64:
1393 ASSEMBLE_CHECKED_STORE_FLOAT(64);
1394 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001395 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001396} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001397
1398
1399// Assemble branches after this instruction.
Emily Bernier958fae72015-03-24 16:35:39 -04001400void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001401 Arm64OperandConverter i(this, instr);
Emily Bernier958fae72015-03-24 16:35:39 -04001402 Label* tlabel = branch->true_label;
1403 Label* flabel = branch->false_label;
1404 FlagsCondition condition = branch->condition;
1405 ArchOpcode opcode = instr->arch_opcode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001406
Emily Bernier958fae72015-03-24 16:35:39 -04001407 if (opcode == kArm64CompareAndBranch32) {
1408 switch (condition) {
1409 case kEqual:
1410 __ Cbz(i.InputRegister32(0), tlabel);
1411 break;
1412 case kNotEqual:
1413 __ Cbnz(i.InputRegister32(0), tlabel);
1414 break;
1415 default:
1416 UNREACHABLE();
1417 }
1418 } else if (opcode == kArm64TestAndBranch32) {
1419 switch (condition) {
1420 case kEqual:
1421 __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1422 break;
1423 case kNotEqual:
1424 __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1425 break;
1426 default:
1427 UNREACHABLE();
1428 }
1429 } else if (opcode == kArm64TestAndBranch) {
1430 switch (condition) {
1431 case kEqual:
1432 __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1433 break;
1434 case kNotEqual:
1435 __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1436 break;
1437 default:
1438 UNREACHABLE();
1439 }
1440 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +00001441 Condition cc = FlagsConditionToCondition(condition);
1442 __ B(cc, tlabel);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001443 }
Emily Bernier958fae72015-03-24 16:35:39 -04001444 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
1445}
1446
1447
Ben Murdoch014dc512016-03-22 12:00:34 +00001448void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernier958fae72015-03-24 16:35:39 -04001449 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001450}
1451
1452
1453// Assemble boolean materializations after this instruction.
1454void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1455 FlagsCondition condition) {
1456 Arm64OperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001457
1458 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1459 // last output of the instruction.
Ben Murdoch014dc512016-03-22 12:00:34 +00001460 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001461 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch014dc512016-03-22 12:00:34 +00001462 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001463 __ Cset(reg, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001464}
1465
1466
Ben Murdoch014dc512016-03-22 12:00:34 +00001467void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1468 Arm64OperandConverter i(this, instr);
1469 Register input = i.InputRegister32(0);
1470 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1471 __ Cmp(input, i.InputInt32(index + 0));
1472 __ B(eq, GetLabel(i.InputRpo(index + 1)));
1473 }
1474 AssembleArchJump(i.InputRpo(1));
1475}
1476
1477
1478void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1479 Arm64OperandConverter i(this, instr);
1480 UseScratchRegisterScope scope(masm());
1481 Register input = i.InputRegister32(0);
1482 Register temp = scope.AcquireX();
1483 size_t const case_count = instr->InputCount() - 2;
1484 Label table;
1485 __ Cmp(input, case_count);
1486 __ B(hs, GetLabel(i.InputRpo(1)));
1487 __ Adr(temp, &table);
1488 __ Add(temp, temp, Operand(input, UXTW, 2));
1489 __ Br(temp);
1490 __ StartBlockPools();
1491 __ Bind(&table);
1492 for (size_t index = 0; index < case_count; ++index) {
1493 __ B(GetLabel(i.InputRpo(index + 2)));
1494 }
1495 __ EndBlockPools();
1496}
1497
1498
1499void CodeGenerator::AssembleDeoptimizerCall(
1500 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001501 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch014dc512016-03-22 12:00:34 +00001502 isolate(), deoptimization_id, bailout_type);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001503 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1504}
1505
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001506void CodeGenerator::AssembleSetupStackPointer() {
1507 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1508 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
1509 __ SetStackPointer(csp);
1510 } else {
1511 __ SetStackPointer(jssp);
1512 }
1513}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001514
// Emits the function prologue: frame construction, OSR entry handling,
// spill-slot allocation, and callee-saved register pushes. The exact
// statement order is ABI-critical; do not reorder.
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->UseNativeStack()) {
    // Frames on the native stack must keep csp 16-byte aligned.
    __ AssertCspAligned();
  }

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsJSFunctionCall()) {
      // JS frames always live on jssp (checked below); emit the standard
      // (possibly pre-aged) JS function prologue.
      DCHECK(!descriptor->UseNativeStack());
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      if (descriptor->IsCFunctionCall()) {
        // Native C frame: push the link register and frame pointer, then
        // point fp at the new frame and reserve the spill area immediately.
        __ Push(lr, fp);
        __ Mov(fp, masm_.StackPointer());
        __ Claim(stack_shrink_slots);
      } else {
        // Stub frame: includes a frame-type marker and the full frame size.
        __ StubPrologue(info()->GetOutputStackFrameType(),
                        frame()->GetTotalFrameSlotCount());
      }
    }
  }

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // The unoptimized frame already covers part of the spill area; only
    // claim the remainder below.
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  if (descriptor->IsJSFunctionCall()) {
    // For JS calls the spill area is reserved here (C calls claimed it
    // above, right after building the frame).
    __ Claim(stack_shrink_slots);
  }

  // Save FP registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  int saved_count = saves_fp.Count();
  if (saved_count != 0) {
    DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
    __ PushCPURegList(saves_fp);
    // Each D register occupies kDoubleSize / kPointerSize frame slots.
    frame()->AllocateSavedCalleeRegisterSlots(saved_count *
                                              (kDoubleSize / kPointerSize));
  }
  // Save registers.
  // TODO(palfia): TF save list is not in sync with
  // CPURegList::GetCalleeSaved(): x30 is missing.
  // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list());
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  saved_count = saves.Count();
  if (saved_count != 0) {
    __ PushCPURegList(saves);
    frame()->AllocateSavedCalleeRegisterSlots(saved_count);
  }
}
1577
1578
// Emits the function epilogue: restores callee-saved registers (in the
// reverse order of AssemblePrologue — general registers first, then FP),
// tears down the frame, drops stack parameters, and returns.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  if (saves.Count() != 0) {
    __ PopCPURegList(saves);
  }

  // Restore fp registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  if (saves_fp.Count() != 0) {
    __ PopCPURegList(saves_fp);
  }

  // Slots of incoming stack parameters to drop below.
  int pop_count = static_cast<int>(descriptor->StackParameterCount());
  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now: all returns branch to a
    // single shared epilogue at return_label_.
    if (return_label_.is_bound()) {
      __ B(&return_label_);
      return;
    } else {
      __ Bind(&return_label_);
      AssembleDeconstructFrame();
      if (descriptor->UseNativeStack()) {
        // Round the pop count up to an even number of slots so csp stays
        // aligned (checked by AssertCspAligned below).
        pop_count += (pop_count & 1);  // align
      }
    }
  } else if (descriptor->UseNativeStack()) {
    pop_count += (pop_count & 1);  // align
  }
  __ Drop(pop_count);

  if (descriptor->UseNativeStack()) {
    __ AssertCspAligned();
  }
  __ Ret();
}
1621
1622
// Emits code to move the value in |source| into |destination|, dispatching on
// the operand kinds (register / stack slot / constant, integer / double).
// Scratch registers are used where a memory-to-memory transfer is required.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register -> register or register -> stack slot.
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Mov(g.ToRegister(destination), src);
    } else {
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsStackSlot()) {
    // Stack slot -> register, or slot -> slot via a scratch register.
    MemOperand src = g.ToMemOperand(source, masm());
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    if (destination->IsRegister()) {
      __ Ldr(g.ToRegister(destination), src);
    } else {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(ConstantOperand::cast(source));
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize the constant into the target register (or a scratch
      // register when the destination is a stack slot).
      UseScratchRegisterScope scope(masm());
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : scope.AcquireX();
      if (src.type() == Constant::kHeapObject) {
        Handle<HeapObject> src_object = src.ToHeapObject();
        Heap::RootListIndex index;
        int slot;
        // Prefer cheaper sources for the object: an existing frame slot,
        // then the root list, then a full object load.
        if (IsMaterializableFromFrame(src_object, &slot)) {
          __ Ldr(dst, g.SlotToMemOperand(slot, masm()));
        } else if (IsMaterializableFromRoot(src_object, &index)) {
          __ LoadRoot(dst, index);
        } else {
          __ LoadObject(dst, src_object);
        }
      } else {
        __ Mov(dst, g.ToImmediate(source));
      }
      if (destination->IsStackSlot()) {
        // Spill the materialized value into the destination slot.
        __ Str(dst, g.ToMemOperand(destination, masm()));
      }
    } else if (src.type() == Constant::kFloat32) {
      // Float32 constant -> S register or 32-bit stack slot.
      if (destination->IsDoubleRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination).S();
        __ Fmov(dst, src.ToFloat32());
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireS();
        __ Fmov(temp, src.ToFloat32());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    } else {
      // Float64 constant -> D register or stack slot.
      DCHECK_EQ(Constant::kFloat64, src.type());
      if (destination->IsDoubleRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination);
        __ Fmov(dst, src.ToFloat64());
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireD();
        __ Fmov(temp, src.ToFloat64());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    }
  } else if (source->IsDoubleRegister()) {
    // FP register -> FP register or double stack slot.
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsDoubleStackSlot()) {
    // Double stack slot -> FP register, or slot -> slot via a scratch D reg.
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source, masm());
    if (destination->IsDoubleRegister()) {
      __ Ldr(g.ToDoubleRegister(destination), src);
    } else {
      UseScratchRegisterScope scope(masm());
      FPRegister temp = scope.AcquireD();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else {
    UNREACHABLE();
  }
}
1718
1719
1720void CodeGenerator::AssembleSwap(InstructionOperand* source,
1721 InstructionOperand* destination) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001722 Arm64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001723 // Dispatch on the source and destination operand kinds. Not all
1724 // combinations are possible.
1725 if (source->IsRegister()) {
1726 // Register-register.
1727 UseScratchRegisterScope scope(masm());
1728 Register temp = scope.AcquireX();
1729 Register src = g.ToRegister(source);
1730 if (destination->IsRegister()) {
1731 Register dst = g.ToRegister(destination);
1732 __ Mov(temp, src);
1733 __ Mov(src, dst);
1734 __ Mov(dst, temp);
1735 } else {
1736 DCHECK(destination->IsStackSlot());
1737 MemOperand dst = g.ToMemOperand(destination, masm());
1738 __ Mov(temp, src);
1739 __ Ldr(src, dst);
1740 __ Str(temp, dst);
1741 }
1742 } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) {
1743 UseScratchRegisterScope scope(masm());
Emily Bernier958fae72015-03-24 16:35:39 -04001744 DoubleRegister temp_0 = scope.AcquireD();
1745 DoubleRegister temp_1 = scope.AcquireD();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001746 MemOperand src = g.ToMemOperand(source, masm());
1747 MemOperand dst = g.ToMemOperand(destination, masm());
1748 __ Ldr(temp_0, src);
1749 __ Ldr(temp_1, dst);
1750 __ Str(temp_0, dst);
1751 __ Str(temp_1, src);
1752 } else if (source->IsDoubleRegister()) {
1753 UseScratchRegisterScope scope(masm());
1754 FPRegister temp = scope.AcquireD();
1755 FPRegister src = g.ToDoubleRegister(source);
1756 if (destination->IsDoubleRegister()) {
1757 FPRegister dst = g.ToDoubleRegister(destination);
1758 __ Fmov(temp, src);
1759 __ Fmov(src, dst);
1760 __ Fmov(dst, temp);
1761 } else {
1762 DCHECK(destination->IsDoubleStackSlot());
1763 MemOperand dst = g.ToMemOperand(destination, masm());
1764 __ Fmov(temp, src);
1765 __ Ldr(src, dst);
1766 __ Str(temp, dst);
1767 }
1768 } else {
1769 // No other combinations are possible.
1770 UNREACHABLE();
1771 }
1772}
1773
1774
// Out-of-line jump-table emission hook. Never called on this architecture:
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 64-bit ARM we emit the jump tables inline.
  UNREACHABLE();
}
1779
1780
// Emits "movz xzr, #0": writes to the zero register are discarded, so this is
// a nop with a distinctive encoding used to mark Smi code inlining sites.
void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
1782
1783
1784void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch014dc512016-03-22 12:00:34 +00001785 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
1786 return;
1787 }
1788
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001789 int space_needed = Deoptimizer::patch_size();
Ben Murdoch014dc512016-03-22 12:00:34 +00001790 // Ensure that we have enough space after the previous lazy-bailout
1791 // instruction for patching the code here.
1792 intptr_t current_pc = masm()->pc_offset();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001793
Ben Murdoch014dc512016-03-22 12:00:34 +00001794 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
1795 intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
1796 DCHECK((padding_size % kInstructionSize) == 0);
1797 InstructionAccurateScope instruction_accurate(
1798 masm(), padding_size / kInstructionSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001799
Ben Murdoch014dc512016-03-22 12:00:34 +00001800 while (padding_size > 0) {
1801 __ nop();
1802 padding_size -= kInstructionSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001803 }
1804 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001805}
1806
1807#undef __
1808
1809} // namespace compiler
1810} // namespace internal
1811} // namespace v8