blob: 9b074b05ccd05e42842da6bc90f191daa25f5b43 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/arm/macro-assembler-arm.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/compiler/code-generator-impl.h"
10#include "src/compiler/gap-resolver.h"
11#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000012#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013
14namespace v8 {
15namespace internal {
16namespace compiler {
17
18#define __ masm()->
19
20
21#define kScratchReg r9
22
23
24// Adds Arm-specific methods to convert InstructionOperands.
// Adds Arm-specific methods to convert InstructionOperands.
class ArmOperandConverter final : public InstructionOperandConverter {
 public:
  ArmOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Single-precision (S) registers are obtained as the low half of the
  // allocated double-precision (D) register; see ToFloat32Register below.
  SwVfpRegister OutputFloat32Register(size_t index = 0) {
    return ToFloat32Register(instr_->OutputAt(index));
  }

  SwVfpRegister InputFloat32Register(size_t index) {
    return ToFloat32Register(instr_->InputAt(index));
  }

  SwVfpRegister ToFloat32Register(InstructionOperand* op) {
    return ToFloat64Register(op).low();
  }

  LowDwVfpRegister OutputFloat64Register(size_t index = 0) {
    return ToFloat64Register(instr_->OutputAt(index));
  }

  LowDwVfpRegister InputFloat64Register(size_t index) {
    return ToFloat64Register(instr_->InputAt(index));
  }

  LowDwVfpRegister ToFloat64Register(InstructionOperand* op) {
    return LowDwVfpRegister::from_code(ToDoubleRegister(op).code());
  }

  // Returns SetCC when this instruction's flags feed a branch or a
  // materialized boolean, LeaveCC when no flags output is needed.
  SBit OutputSBit() const {
    switch (instr_->flags_mode()) {
      case kFlags_branch:
      case kFlags_set:
        return SetCC;
      case kFlags_none:
        return LeaveCC;
    }
    UNREACHABLE();
    return LeaveCC;
  }

  // Converts the constant input at |index| into an assembler Operand.
  // Float constants are boxed as tenured heap numbers; 64-bit, external
  // reference, heap object and RPO constants are unsupported here.
  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
      case Constant::kExternalReference:
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  // Decodes the instruction's addressing mode into an ARM "Operand2"
  // (immediate, plain register, or register with an ASR/LSL/LSR/ROR shift
  // by immediate or register), consuming inputs from |first_index| on.
  Operand InputOperand2(size_t first_index) {
    const size_t index = first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Offset_RI:
      case kMode_Offset_RR:
        break;
      case kMode_Operand2_I:
        return InputImmediate(index + 0);
      case kMode_Operand2_R:
        return Operand(InputRegister(index + 0));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister(index + 0), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_R:
        return Operand(InputRegister(index + 0), ASR, InputRegister(index + 1));
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister(index + 0), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSL_R:
        return Operand(InputRegister(index + 0), LSL, InputRegister(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister(index + 0), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_R:
        return Operand(InputRegister(index + 0), LSR, InputRegister(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister(index + 0), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_R:
        return Operand(InputRegister(index + 0), ROR, InputRegister(index + 1));
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  // Decodes a memory operand (base register + immediate or register offset)
  // starting at |*first_index|, advancing |*first_index| past the two
  // consumed inputs so callers can read any trailing inputs.
  MemOperand InputOffset(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_I:
      case kMode_Operand2_R:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ASR_R:
      case kMode_Operand2_R_LSL_I:
      case kMode_Operand2_R_LSL_R:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_LSR_R:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_ROR_R:
        break;
      case kMode_Offset_RI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_Offset_RR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(r0);
  }

  // Convenience overload for callers that do not need the updated index.
  MemOperand InputOffset(size_t first_index = 0) {
    return InputOffset(&first_index);
  }

  // Resolves an allocated stack slot to an sp- or fp-relative MemOperand.
  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};
157
158
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400159namespace {
160
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000161class OutOfLineLoadFloat32 final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400162 public:
163 OutOfLineLoadFloat32(CodeGenerator* gen, SwVfpRegister result)
164 : OutOfLineCode(gen), result_(result) {}
165
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000166 void Generate() final {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400167 __ vmov(result_, std::numeric_limits<float>::quiet_NaN());
168 }
169
170 private:
171 SwVfpRegister const result_;
172};
173
174
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000175class OutOfLineLoadFloat64 final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400176 public:
177 OutOfLineLoadFloat64(CodeGenerator* gen, DwVfpRegister result)
178 : OutOfLineCode(gen), result_(result) {}
179
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000180 void Generate() final {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400181 __ vmov(result_, std::numeric_limits<double>::quiet_NaN(), kScratchReg);
182 }
183
184 private:
185 DwVfpRegister const result_;
186};
187
188
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000189class OutOfLineLoadInteger final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400190 public:
191 OutOfLineLoadInteger(CodeGenerator* gen, Register result)
192 : OutOfLineCode(gen), result_(result) {}
193
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000194 void Generate() final { __ mov(result_, Operand::Zero()); }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400195
196 private:
197 Register const result_;
198};
199
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000200
// Out-of-line portion of a store with write barrier: the inline path has
// already performed the store and checked the page flags of |object|; this
// code filters out values that cannot need a barrier and then calls the
// RecordWriteStub for the written slot at object + index.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    // Smi values never need a write barrier, unless the mode already
    // guarantees the value is a pointer.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the stub when the value's page is not interesting to the GC.
    if (mode_ > RecordWriteMode::kValueIsMap) {
      __ CheckPageFlag(value_, scratch0_,
                       MemoryChunk::kPointersToHereAreInterestingMask, eq,
                       exit());
    }
    // Only save FP registers around the stub call if this frame uses them.
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    // TODO(turbofan): Once we get frame elision working, we need to save
    // and restore lr properly here if the frame was elided.
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         EMIT_REMEMBERED_SET, save_fp_mode);
    // scratch1_ = address of the just-written slot, as the stub expects.
    __ add(scratch1_, object_, index_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Register const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};
241
242
243Condition FlagsConditionToCondition(FlagsCondition condition) {
244 switch (condition) {
245 case kEqual:
246 return eq;
247 case kNotEqual:
248 return ne;
249 case kSignedLessThan:
250 return lt;
251 case kSignedGreaterThanOrEqual:
252 return ge;
253 case kSignedLessThanOrEqual:
254 return le;
255 case kSignedGreaterThan:
256 return gt;
257 case kUnsignedLessThan:
258 return lo;
259 case kUnsignedGreaterThanOrEqual:
260 return hs;
261 case kUnsignedLessThanOrEqual:
262 return ls;
263 case kUnsignedGreaterThan:
264 return hi;
265 case kFloatLessThanOrUnordered:
266 return lt;
267 case kFloatGreaterThanOrEqual:
268 return ge;
269 case kFloatLessThanOrEqual:
270 return ls;
271 case kFloatGreaterThanOrUnordered:
272 return hi;
273 case kFloatLessThan:
274 return lo;
275 case kFloatGreaterThanOrEqualOrUnordered:
276 return hs;
277 case kFloatLessThanOrEqualOrUnordered:
278 return le;
279 case kFloatGreaterThan:
280 return gt;
281 case kOverflow:
282 return vs;
283 case kNotOverflow:
284 return vc;
285 default:
286 break;
287 }
288 UNREACHABLE();
289 return kNoCondition;
290}
291
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400292} // namespace
293
294
// Emits a checked float load of the given width: compares the offset
// (input 0) against the length (input 1, register or immediate), branches
// to an out-of-line NaN producer when offset >= length (unsigned), and
// otherwise loads from the memory operand starting at input 2.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                           \
  do {                                                               \
    auto result = i.OutputFloat##width##Register();                  \
    auto offset = i.InputRegister(0);                                \
    if (instr->InputAt(1)->IsRegister()) {                           \
      __ cmp(offset, i.InputRegister(1));                            \
    } else {                                                         \
      __ cmp(offset, i.InputImmediate(1));                           \
    }                                                                \
    auto ool = new (zone()) OutOfLineLoadFloat##width(this, result); \
    __ b(hs, ool->entry());                                          \
    __ vldr(result, i.InputOffset(2));                               \
    __ bind(ool->exit());                                            \
    DCHECK_EQ(LeaveCC, i.OutputSBit());                              \
  } while (0)
310
311
// Emits a checked integer load: bounds-checks input 0 against input 1 and
// branches to an out-of-line zeroing path on out-of-bounds (unsigned >=);
// otherwise performs |asm_instr| from the memory operand at input 2.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                \
  do {                                                          \
    auto result = i.OutputRegister();                           \
    auto offset = i.InputRegister(0);                           \
    if (instr->InputAt(1)->IsRegister()) {                      \
      __ cmp(offset, i.InputRegister(1));                       \
    } else {                                                    \
      __ cmp(offset, i.InputImmediate(1));                      \
    }                                                           \
    auto ool = new (zone()) OutOfLineLoadInteger(this, result); \
    __ b(hs, ool->entry());                                     \
    __ asm_instr(result, i.InputOffset(2));                     \
    __ bind(ool->exit());                                       \
    DCHECK_EQ(LeaveCC, i.OutputSBit());                         \
  } while (0)
327
328
// Emits a checked float store: bounds-checks input 0 against input 1 and
// uses a conditionally executed vstr (lo, i.e. offset < length) rather
// than a branch — out-of-bounds stores are simply dropped.
#define ASSEMBLE_CHECKED_STORE_FLOAT(width)        \
  do {                                             \
    auto offset = i.InputRegister(0);              \
    if (instr->InputAt(1)->IsRegister()) {         \
      __ cmp(offset, i.InputRegister(1));          \
    } else {                                       \
      __ cmp(offset, i.InputImmediate(1));         \
    }                                              \
    auto value = i.InputFloat##width##Register(2); \
    __ vstr(value, i.InputOffset(3), lo);          \
    DCHECK_EQ(LeaveCC, i.OutputSBit());            \
  } while (0)
341
342
// Emits a checked integer store: bounds-checks input 0 against input 1 and
// conditionally executes |asm_instr| only when in bounds (lo); an
// out-of-bounds store is silently skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \
  do {                                            \
    auto offset = i.InputRegister(0);             \
    if (instr->InputAt(1)->IsRegister()) {        \
      __ cmp(offset, i.InputRegister(1));         \
    } else {                                      \
      __ cmp(offset, i.InputImmediate(1));        \
    }                                             \
    auto value = i.InputRegister(2);              \
    __ asm_instr(value, i.InputOffset(3), lo);    \
    DCHECK_EQ(LeaveCC, i.OutputSBit());           \
  } while (0)
355
356
// Pops any stack slots that a tail call no longer needs (positive slot
// delta) and resets the frame access state to its default addressing.
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    // Release the surplus argument slots before jumping to the callee.
    __ add(sp, sp, Operand(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}
364
365
// Prepares the stack for a tail call: grows it when the callee needs more
// argument slots (negative slot delta), and if this frame exists, reloads
// lr/fp (and cp when embedded constant pools are enabled) from the frame
// so the tail call replaces the current activation.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ sub(sp, sp, Operand(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame()->needs_frame()) {
    if (FLAG_enable_embedded_constant_pool) {
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
    }
    __ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  // Until the frame is torn down, address slots relative to sp.
  frame_access_state()->SetFrameAccessToSP();
}
381
382
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000383// Assembles an instruction after register allocation, producing machine code.
384void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
385 ArmOperandConverter i(this, instr);
386
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000387 masm()->MaybeCheckConstPool();
388
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000389 switch (ArchOpcodeField::decode(instr->opcode())) {
390 case kArchCallCodeObject: {
391 EnsureSpaceForLazyDeopt();
392 if (instr->InputAt(0)->IsImmediate()) {
393 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
394 RelocInfo::CODE_TARGET);
395 } else {
396 __ add(ip, i.InputRegister(0),
397 Operand(Code::kHeaderSize - kHeapObjectTag));
398 __ Call(ip);
399 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000400 RecordCallPosition(instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000401 DCHECK_EQ(LeaveCC, i.OutputSBit());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000402 frame_access_state()->ClearSPDelta();
403 break;
404 }
405 case kArchTailCallCodeObject: {
406 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
407 AssembleDeconstructActivationRecord(stack_param_delta);
408 if (instr->InputAt(0)->IsImmediate()) {
409 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
410 RelocInfo::CODE_TARGET);
411 } else {
412 __ add(ip, i.InputRegister(0),
413 Operand(Code::kHeaderSize - kHeapObjectTag));
414 __ Jump(ip);
415 }
416 DCHECK_EQ(LeaveCC, i.OutputSBit());
417 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000418 break;
419 }
420 case kArchCallJSFunction: {
421 EnsureSpaceForLazyDeopt();
422 Register func = i.InputRegister(0);
423 if (FLAG_debug_code) {
424 // Check the function's context matches the context argument.
425 __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
426 __ cmp(cp, kScratchReg);
427 __ Assert(eq, kWrongFunctionContext);
428 }
429 __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
430 __ Call(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000431 RecordCallPosition(instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000432 DCHECK_EQ(LeaveCC, i.OutputSBit());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000433 frame_access_state()->ClearSPDelta();
434 break;
435 }
436 case kArchTailCallJSFunction: {
437 Register func = i.InputRegister(0);
438 if (FLAG_debug_code) {
439 // Check the function's context matches the context argument.
440 __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
441 __ cmp(cp, kScratchReg);
442 __ Assert(eq, kWrongFunctionContext);
443 }
444 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
445 AssembleDeconstructActivationRecord(stack_param_delta);
446 __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
447 __ Jump(ip);
448 DCHECK_EQ(LeaveCC, i.OutputSBit());
449 frame_access_state()->ClearSPDelta();
450 break;
451 }
452 case kArchLazyBailout: {
453 EnsureSpaceForLazyDeopt();
454 RecordCallPosition(instr);
455 break;
456 }
457 case kArchPrepareCallCFunction: {
458 int const num_parameters = MiscField::decode(instr->opcode());
459 __ PrepareCallCFunction(num_parameters, kScratchReg);
460 // Frame alignment requires using FP-relative frame addressing.
461 frame_access_state()->SetFrameAccessToFP();
462 break;
463 }
464 case kArchPrepareTailCall:
465 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
466 break;
467 case kArchCallCFunction: {
468 int const num_parameters = MiscField::decode(instr->opcode());
469 if (instr->InputAt(0)->IsImmediate()) {
470 ExternalReference ref = i.InputExternalReference(0);
471 __ CallCFunction(ref, num_parameters);
472 } else {
473 Register func = i.InputRegister(0);
474 __ CallCFunction(func, num_parameters);
475 }
476 frame_access_state()->SetFrameAccessToDefault();
477 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000478 break;
479 }
480 case kArchJmp:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400481 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000482 DCHECK_EQ(LeaveCC, i.OutputSBit());
483 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000484 case kArchLookupSwitch:
485 AssembleArchLookupSwitch(instr);
486 DCHECK_EQ(LeaveCC, i.OutputSBit());
487 break;
488 case kArchTableSwitch:
489 AssembleArchTableSwitch(instr);
490 DCHECK_EQ(LeaveCC, i.OutputSBit());
491 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000492 case kArchNop:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000493 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000494 // don't emit code for nops.
495 DCHECK_EQ(LeaveCC, i.OutputSBit());
496 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000497 case kArchDeoptimize: {
498 int deopt_state_id =
499 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
500 Deoptimizer::BailoutType bailout_type =
501 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
502 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
503 break;
504 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000505 case kArchRet:
506 AssembleReturn();
507 DCHECK_EQ(LeaveCC, i.OutputSBit());
508 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400509 case kArchStackPointer:
510 __ mov(i.OutputRegister(), sp);
511 DCHECK_EQ(LeaveCC, i.OutputSBit());
512 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000513 case kArchFramePointer:
514 __ mov(i.OutputRegister(), fp);
515 DCHECK_EQ(LeaveCC, i.OutputSBit());
516 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000517 case kArchTruncateDoubleToI:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400518 __ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000519 DCHECK_EQ(LeaveCC, i.OutputSBit());
520 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000521 case kArchStoreWithWriteBarrier: {
522 RecordWriteMode mode =
523 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
524 Register object = i.InputRegister(0);
525 Register index = i.InputRegister(1);
526 Register value = i.InputRegister(2);
527 Register scratch0 = i.TempRegister(0);
528 Register scratch1 = i.TempRegister(1);
529 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
530 scratch0, scratch1, mode);
531 __ str(value, MemOperand(object, index));
532 __ CheckPageFlag(object, scratch0,
533 MemoryChunk::kPointersFromHereAreInterestingMask, ne,
534 ool->entry());
535 __ bind(ool->exit());
536 break;
537 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000538 case kArmAdd:
539 __ add(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
540 i.OutputSBit());
541 break;
542 case kArmAnd:
543 __ and_(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
544 i.OutputSBit());
545 break;
546 case kArmBic:
547 __ bic(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
548 i.OutputSBit());
549 break;
550 case kArmMul:
551 __ mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
552 i.OutputSBit());
553 break;
554 case kArmMla:
555 __ mla(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
556 i.InputRegister(2), i.OutputSBit());
557 break;
558 case kArmMls: {
559 CpuFeatureScope scope(masm(), MLS);
560 __ mls(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
561 i.InputRegister(2));
562 DCHECK_EQ(LeaveCC, i.OutputSBit());
563 break;
564 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400565 case kArmSmmul:
566 __ smmul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
567 DCHECK_EQ(LeaveCC, i.OutputSBit());
568 break;
569 case kArmSmmla:
570 __ smmla(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
571 i.InputRegister(2));
572 DCHECK_EQ(LeaveCC, i.OutputSBit());
573 break;
574 case kArmUmull:
575 __ umull(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
576 i.InputRegister(1), i.OutputSBit());
577 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000578 case kArmSdiv: {
579 CpuFeatureScope scope(masm(), SUDIV);
580 __ sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
581 DCHECK_EQ(LeaveCC, i.OutputSBit());
582 break;
583 }
584 case kArmUdiv: {
585 CpuFeatureScope scope(masm(), SUDIV);
586 __ udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
587 DCHECK_EQ(LeaveCC, i.OutputSBit());
588 break;
589 }
590 case kArmMov:
591 __ Move(i.OutputRegister(), i.InputOperand2(0), i.OutputSBit());
592 break;
593 case kArmMvn:
594 __ mvn(i.OutputRegister(), i.InputOperand2(0), i.OutputSBit());
595 break;
596 case kArmOrr:
597 __ orr(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
598 i.OutputSBit());
599 break;
600 case kArmEor:
601 __ eor(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
602 i.OutputSBit());
603 break;
604 case kArmSub:
605 __ sub(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
606 i.OutputSBit());
607 break;
608 case kArmRsb:
609 __ rsb(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
610 i.OutputSBit());
611 break;
612 case kArmBfc: {
613 CpuFeatureScope scope(masm(), ARMv7);
614 __ bfc(i.OutputRegister(), i.InputInt8(1), i.InputInt8(2));
615 DCHECK_EQ(LeaveCC, i.OutputSBit());
616 break;
617 }
618 case kArmUbfx: {
619 CpuFeatureScope scope(masm(), ARMv7);
620 __ ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
621 i.InputInt8(2));
622 DCHECK_EQ(LeaveCC, i.OutputSBit());
623 break;
624 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400625 case kArmSxtb:
626 __ sxtb(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
627 DCHECK_EQ(LeaveCC, i.OutputSBit());
628 break;
629 case kArmSxth:
630 __ sxth(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
631 DCHECK_EQ(LeaveCC, i.OutputSBit());
632 break;
633 case kArmSxtab:
634 __ sxtab(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
635 i.InputInt32(2));
636 DCHECK_EQ(LeaveCC, i.OutputSBit());
637 break;
638 case kArmSxtah:
639 __ sxtah(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
640 i.InputInt32(2));
641 DCHECK_EQ(LeaveCC, i.OutputSBit());
642 break;
643 case kArmUxtb:
644 __ uxtb(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
645 DCHECK_EQ(LeaveCC, i.OutputSBit());
646 break;
647 case kArmUxth:
648 __ uxth(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
649 DCHECK_EQ(LeaveCC, i.OutputSBit());
650 break;
651 case kArmUxtab:
652 __ uxtab(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
653 i.InputInt32(2));
654 DCHECK_EQ(LeaveCC, i.OutputSBit());
655 break;
656 case kArmUxtah:
657 __ uxtah(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
658 i.InputInt32(2));
659 DCHECK_EQ(LeaveCC, i.OutputSBit());
660 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000661 case kArmClz:
662 __ clz(i.OutputRegister(), i.InputRegister(0));
663 DCHECK_EQ(LeaveCC, i.OutputSBit());
664 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000665 case kArmCmp:
666 __ cmp(i.InputRegister(0), i.InputOperand2(1));
667 DCHECK_EQ(SetCC, i.OutputSBit());
668 break;
669 case kArmCmn:
670 __ cmn(i.InputRegister(0), i.InputOperand2(1));
671 DCHECK_EQ(SetCC, i.OutputSBit());
672 break;
673 case kArmTst:
674 __ tst(i.InputRegister(0), i.InputOperand2(1));
675 DCHECK_EQ(SetCC, i.OutputSBit());
676 break;
677 case kArmTeq:
678 __ teq(i.InputRegister(0), i.InputOperand2(1));
679 DCHECK_EQ(SetCC, i.OutputSBit());
680 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000681 case kArmVcmpF32:
682 if (instr->InputAt(1)->IsDoubleRegister()) {
683 __ VFPCompareAndSetFlags(i.InputFloat32Register(0),
684 i.InputFloat32Register(1));
685 } else {
686 DCHECK(instr->InputAt(1)->IsImmediate());
687 // 0.0 is the only immediate supported by vcmp instructions.
688 DCHECK(i.InputFloat32(1) == 0.0f);
689 __ VFPCompareAndSetFlags(i.InputFloat32Register(0), i.InputFloat32(1));
690 }
691 DCHECK_EQ(SetCC, i.OutputSBit());
692 break;
693 case kArmVaddF32:
694 __ vadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
695 i.InputFloat32Register(1));
696 DCHECK_EQ(LeaveCC, i.OutputSBit());
697 break;
698 case kArmVsubF32:
699 __ vsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
700 i.InputFloat32Register(1));
701 DCHECK_EQ(LeaveCC, i.OutputSBit());
702 break;
703 case kArmVmulF32:
704 __ vmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
705 i.InputFloat32Register(1));
706 DCHECK_EQ(LeaveCC, i.OutputSBit());
707 break;
708 case kArmVmlaF32:
709 __ vmla(i.OutputFloat32Register(), i.InputFloat32Register(1),
710 i.InputFloat32Register(2));
711 DCHECK_EQ(LeaveCC, i.OutputSBit());
712 break;
713 case kArmVmlsF32:
714 __ vmls(i.OutputFloat32Register(), i.InputFloat32Register(1),
715 i.InputFloat32Register(2));
716 DCHECK_EQ(LeaveCC, i.OutputSBit());
717 break;
718 case kArmVdivF32:
719 __ vdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
720 i.InputFloat32Register(1));
721 DCHECK_EQ(LeaveCC, i.OutputSBit());
722 break;
723 case kArmVsqrtF32:
724 __ vsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
725 break;
726 case kArmVabsF32:
727 __ vabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
728 break;
729 case kArmVnegF32:
730 __ vneg(i.OutputFloat32Register(), i.InputFloat32Register(0));
731 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000732 case kArmVcmpF64:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000733 if (instr->InputAt(1)->IsDoubleRegister()) {
734 __ VFPCompareAndSetFlags(i.InputFloat64Register(0),
735 i.InputFloat64Register(1));
736 } else {
737 DCHECK(instr->InputAt(1)->IsImmediate());
738 // 0.0 is the only immediate supported by vcmp instructions.
739 DCHECK(i.InputDouble(1) == 0.0);
740 __ VFPCompareAndSetFlags(i.InputFloat64Register(0), i.InputDouble(1));
741 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000742 DCHECK_EQ(SetCC, i.OutputSBit());
743 break;
744 case kArmVaddF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400745 __ vadd(i.OutputFloat64Register(), i.InputFloat64Register(0),
746 i.InputFloat64Register(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000747 DCHECK_EQ(LeaveCC, i.OutputSBit());
748 break;
749 case kArmVsubF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400750 __ vsub(i.OutputFloat64Register(), i.InputFloat64Register(0),
751 i.InputFloat64Register(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000752 DCHECK_EQ(LeaveCC, i.OutputSBit());
753 break;
754 case kArmVmulF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400755 __ vmul(i.OutputFloat64Register(), i.InputFloat64Register(0),
756 i.InputFloat64Register(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000757 DCHECK_EQ(LeaveCC, i.OutputSBit());
758 break;
759 case kArmVmlaF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400760 __ vmla(i.OutputFloat64Register(), i.InputFloat64Register(1),
761 i.InputFloat64Register(2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000762 DCHECK_EQ(LeaveCC, i.OutputSBit());
763 break;
764 case kArmVmlsF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400765 __ vmls(i.OutputFloat64Register(), i.InputFloat64Register(1),
766 i.InputFloat64Register(2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000767 DCHECK_EQ(LeaveCC, i.OutputSBit());
768 break;
769 case kArmVdivF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400770 __ vdiv(i.OutputFloat64Register(), i.InputFloat64Register(0),
771 i.InputFloat64Register(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000772 DCHECK_EQ(LeaveCC, i.OutputSBit());
773 break;
774 case kArmVmodF64: {
775 // TODO(bmeurer): We should really get rid of this special instruction,
776 // and generate a CallAddress instruction instead.
777 FrameScope scope(masm(), StackFrame::MANUAL);
778 __ PrepareCallCFunction(0, 2, kScratchReg);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400779 __ MovToFloatParameters(i.InputFloat64Register(0),
780 i.InputFloat64Register(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000781 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
782 0, 2);
783 // Move the result in the double result register.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400784 __ MovFromFloatResult(i.OutputFloat64Register());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000785 DCHECK_EQ(LeaveCC, i.OutputSBit());
786 break;
787 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000788 case kArmVsqrtF64:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400789 __ vsqrt(i.OutputFloat64Register(), i.InputFloat64Register(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000790 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000791 case kArmVabsF64:
792 __ vabs(i.OutputFloat64Register(), i.InputFloat64Register(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400793 break;
794 case kArmVnegF64:
795 __ vneg(i.OutputFloat64Register(), i.InputFloat64Register(0));
796 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000797 case kArmVrintmF32:
798 __ vrintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
799 break;
800 case kArmVrintmF64:
801 __ vrintm(i.OutputFloat64Register(), i.InputFloat64Register(0));
802 break;
803 case kArmVrintpF32:
804 __ vrintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
805 break;
806 case kArmVrintpF64:
807 __ vrintp(i.OutputFloat64Register(), i.InputFloat64Register(0));
808 break;
809 case kArmVrintzF32:
810 __ vrintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
811 break;
812 case kArmVrintzF64:
813 __ vrintz(i.OutputFloat64Register(), i.InputFloat64Register(0));
814 break;
815 case kArmVrintaF64:
816 __ vrinta(i.OutputFloat64Register(), i.InputFloat64Register(0));
817 break;
818 case kArmVrintnF32:
819 __ vrintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
820 break;
821 case kArmVrintnF64:
822 __ vrintn(i.OutputFloat64Register(), i.InputFloat64Register(0));
823 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400824 case kArmVcvtF32F64: {
825 __ vcvt_f32_f64(i.OutputFloat32Register(), i.InputFloat64Register(0));
826 DCHECK_EQ(LeaveCC, i.OutputSBit());
827 break;
828 }
829 case kArmVcvtF64F32: {
830 __ vcvt_f64_f32(i.OutputFloat64Register(), i.InputFloat32Register(0));
831 DCHECK_EQ(LeaveCC, i.OutputSBit());
832 break;
833 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000834 case kArmVcvtF64S32: {
835 SwVfpRegister scratch = kScratchDoubleReg.low();
836 __ vmov(scratch, i.InputRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400837 __ vcvt_f64_s32(i.OutputFloat64Register(), scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000838 DCHECK_EQ(LeaveCC, i.OutputSBit());
839 break;
840 }
841 case kArmVcvtF64U32: {
842 SwVfpRegister scratch = kScratchDoubleReg.low();
843 __ vmov(scratch, i.InputRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400844 __ vcvt_f64_u32(i.OutputFloat64Register(), scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000845 DCHECK_EQ(LeaveCC, i.OutputSBit());
846 break;
847 }
848 case kArmVcvtS32F64: {
849 SwVfpRegister scratch = kScratchDoubleReg.low();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400850 __ vcvt_s32_f64(scratch, i.InputFloat64Register(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000851 __ vmov(i.OutputRegister(), scratch);
852 DCHECK_EQ(LeaveCC, i.OutputSBit());
853 break;
854 }
855 case kArmVcvtU32F64: {
856 SwVfpRegister scratch = kScratchDoubleReg.low();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400857 __ vcvt_u32_f64(scratch, i.InputFloat64Register(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000858 __ vmov(i.OutputRegister(), scratch);
859 DCHECK_EQ(LeaveCC, i.OutputSBit());
860 break;
861 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000862 case kArmVmovLowU32F64:
863 __ VmovLow(i.OutputRegister(), i.InputFloat64Register(0));
864 DCHECK_EQ(LeaveCC, i.OutputSBit());
865 break;
866 case kArmVmovLowF64U32:
867 __ VmovLow(i.OutputFloat64Register(), i.InputRegister(1));
868 DCHECK_EQ(LeaveCC, i.OutputSBit());
869 break;
870 case kArmVmovHighU32F64:
871 __ VmovHigh(i.OutputRegister(), i.InputFloat64Register(0));
872 DCHECK_EQ(LeaveCC, i.OutputSBit());
873 break;
874 case kArmVmovHighF64U32:
875 __ VmovHigh(i.OutputFloat64Register(), i.InputRegister(1));
876 DCHECK_EQ(LeaveCC, i.OutputSBit());
877 break;
878 case kArmVmovF64U32U32:
879 __ vmov(i.OutputFloat64Register(), i.InputRegister(0),
880 i.InputRegister(1));
881 DCHECK_EQ(LeaveCC, i.OutputSBit());
882 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000883 case kArmLdrb:
884 __ ldrb(i.OutputRegister(), i.InputOffset());
885 DCHECK_EQ(LeaveCC, i.OutputSBit());
886 break;
887 case kArmLdrsb:
888 __ ldrsb(i.OutputRegister(), i.InputOffset());
889 DCHECK_EQ(LeaveCC, i.OutputSBit());
890 break;
891 case kArmStrb: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000892 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000893 MemOperand operand = i.InputOffset(&index);
894 __ strb(i.InputRegister(index), operand);
895 DCHECK_EQ(LeaveCC, i.OutputSBit());
896 break;
897 }
898 case kArmLdrh:
899 __ ldrh(i.OutputRegister(), i.InputOffset());
900 break;
901 case kArmLdrsh:
902 __ ldrsh(i.OutputRegister(), i.InputOffset());
903 break;
904 case kArmStrh: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000905 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000906 MemOperand operand = i.InputOffset(&index);
907 __ strh(i.InputRegister(index), operand);
908 DCHECK_EQ(LeaveCC, i.OutputSBit());
909 break;
910 }
911 case kArmLdr:
912 __ ldr(i.OutputRegister(), i.InputOffset());
913 break;
914 case kArmStr: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000915 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000916 MemOperand operand = i.InputOffset(&index);
917 __ str(i.InputRegister(index), operand);
918 DCHECK_EQ(LeaveCC, i.OutputSBit());
919 break;
920 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400921 case kArmVldrF32: {
922 __ vldr(i.OutputFloat32Register(), i.InputOffset());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000923 DCHECK_EQ(LeaveCC, i.OutputSBit());
924 break;
925 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400926 case kArmVstrF32: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000927 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000928 MemOperand operand = i.InputOffset(&index);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400929 __ vstr(i.InputFloat32Register(index), operand);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000930 DCHECK_EQ(LeaveCC, i.OutputSBit());
931 break;
932 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400933 case kArmVldrF64:
934 __ vldr(i.OutputFloat64Register(), i.InputOffset());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000935 DCHECK_EQ(LeaveCC, i.OutputSBit());
936 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400937 case kArmVstrF64: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000938 size_t index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000939 MemOperand operand = i.InputOffset(&index);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400940 __ vstr(i.InputFloat64Register(index), operand);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000941 DCHECK_EQ(LeaveCC, i.OutputSBit());
942 break;
943 }
944 case kArmPush:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000945 if (instr->InputAt(0)->IsDoubleRegister()) {
946 __ vpush(i.InputDoubleRegister(0));
947 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
948 } else {
949 __ push(i.InputRegister(0));
950 frame_access_state()->IncreaseSPDelta(1);
951 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000952 DCHECK_EQ(LeaveCC, i.OutputSBit());
953 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000954 case kArmPoke: {
955 int const slot = MiscField::decode(instr->opcode());
956 __ str(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000957 DCHECK_EQ(LeaveCC, i.OutputSBit());
958 break;
959 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400960 case kCheckedLoadInt8:
961 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsb);
962 break;
963 case kCheckedLoadUint8:
964 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrb);
965 break;
966 case kCheckedLoadInt16:
967 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsh);
968 break;
969 case kCheckedLoadUint16:
970 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrh);
971 break;
972 case kCheckedLoadWord32:
973 ASSEMBLE_CHECKED_LOAD_INTEGER(ldr);
974 break;
975 case kCheckedLoadFloat32:
976 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
977 break;
978 case kCheckedLoadFloat64:
979 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
980 break;
981 case kCheckedStoreWord8:
982 ASSEMBLE_CHECKED_STORE_INTEGER(strb);
983 break;
984 case kCheckedStoreWord16:
985 ASSEMBLE_CHECKED_STORE_INTEGER(strh);
986 break;
987 case kCheckedStoreWord32:
988 ASSEMBLE_CHECKED_STORE_INTEGER(str);
989 break;
990 case kCheckedStoreFloat32:
991 ASSEMBLE_CHECKED_STORE_FLOAT(32);
992 break;
993 case kCheckedStoreFloat64:
994 ASSEMBLE_CHECKED_STORE_FLOAT(64);
995 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000996 case kCheckedLoadWord64:
997 case kCheckedStoreWord64:
998 UNREACHABLE(); // currently unsupported checked int64 load/store.
999 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001000 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001001} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001002
1003
1004// Assembles branches after an instruction.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001005void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001006 ArmOperandConverter i(this, instr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001007 Label* tlabel = branch->true_label;
1008 Label* flabel = branch->false_label;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001009 Condition cc = FlagsConditionToCondition(branch->condition);
1010 __ b(cc, tlabel);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001011 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
1012}
1013
1014
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001015void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001016 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001017}
1018
1019
1020// Assembles boolean materializations after an instruction.
1021void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1022 FlagsCondition condition) {
1023 ArmOperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001024
1025 // Materialize a full 32-bit 1 or 0 value. The result register is always the
1026 // last output of the instruction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001027 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001028 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001029 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001030 __ mov(reg, Operand(0));
1031 __ mov(reg, Operand(1), LeaveCC, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001032}
1033
1034
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001035void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1036 ArmOperandConverter i(this, instr);
1037 Register input = i.InputRegister(0);
1038 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1039 __ cmp(input, Operand(i.InputInt32(index + 0)));
1040 __ b(eq, GetLabel(i.InputRpo(index + 1)));
1041 }
1042 AssembleArchJump(i.InputRpo(1));
1043}
1044
1045
// Assembles a table switch as an inline jump table. Instruction inputs are
// [index, default_rpo, case0_rpo, case1_rpo, ...].
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  ArmOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  size_t const case_count = instr->InputCount() - 2;
  // Ensure to emit the constant pool first if necessary. A pool dumped into
  // the middle of the sequence below would break the pc-relative arithmetic.
  __ CheckConstPool(true, true);
  __ cmp(input, Operand(case_count));
  // Keep the constant pool blocked for the next case_count + 2 instructions:
  // the add, the default branch, and one table branch per case.
  __ BlockConstPoolFor(case_count + 2);
  // On ARM, reading pc as an operand yields the address of the current
  // instruction plus 8, i.e. the first table entry after the default branch
  // below. Adding input * 4 (LSL 2, one instruction per entry) therefore
  // jumps to entry 'input'. The 'lo' condition restricts this to in-range
  // (unsigned input < case_count) indices; out-of-range values fall through
  // to the default branch.
  __ add(pc, pc, Operand(input, LSL, 2), LeaveCC, lo);
  __ b(GetLabel(i.InputRpo(1)));
  // Emit the jump table itself: one unconditional branch per case.
  for (size_t index = 0; index < case_count; ++index) {
    __ b(GetLabel(i.InputRpo(index + 2)));
  }
}
1060
1061
1062void CodeGenerator::AssembleDeoptimizerCall(
1063 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001064 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001065 isolate(), deoptimization_id, bailout_type);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001066 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1067}
1068
1069
// Assembles the function prologue: builds the frame appropriate for the
// incoming call descriptor, allocates spill slots (including OSR adjustment)
// and saves callee-saved FP and general-purpose registers.
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall()) {
    // C-ABI frame: save lr/fp (plus pp when the embedded constant pool is
    // enabled) and establish fp.
    if (FLAG_enable_embedded_constant_pool) {
      __ Push(lr, fp, pp);
      // Adjust FP to point to saved FP.
      __ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
    } else {
      __ Push(lr, fp);
      __ mov(fp, sp);
    }
  } else if (descriptor->IsJSFunctionCall()) {
    // Standard JS function frame; may emit the pre-aged form of the prologue.
    __ Prologue(this->info()->GeneratePreagedPrologue());
  } else if (frame()->needs_frame()) {
    __ StubPrologue();
  } else {
    // No frame needed at all; record its elided size as zero.
    frame()->SetElidedFrameSizeInSlots(0);
  }
  frame_access_state()->SetFrameAccessToDefault();

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // TODO(titzer): cannot address target function == local #-1
    __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    // Keep the callee-saved FP register area correctly aligned.
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    __ sub(sp, sp, Operand(stack_shrink_slots * kPointerSize));
  }

  if (saves_fp != 0) {
    // Save callee-saved FP registers. vstm takes a contiguous register range,
    // so the saved set must be a single run of d-registers (checked below).
    STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32);
    // NOTE(review): 'clz - 1' equals the index of the highest set bit only
    // when clz == 16, i.e. when the top saved register is d15 — true for the
    // ARM callee-saved set d8-d15, but confirm if the set ever changes.
    uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1;
    uint32_t first = base::bits::CountTrailingZeros32(saves_fp);
    DCHECK_EQ((last - first + 1), base::bits::CountPopulation32(saves_fp));
    __ vstm(db_w, sp, DwVfpRegister::from_code(first),
            DwVfpRegister::from_code(last));
    frame()->AllocateSavedCalleeRegisterSlots((last - first + 1) *
                                              (kDoubleSize / kPointerSize));
  }
  // pp is handled by the constant-pool machinery above, so drop it from the
  // generic save set when the embedded constant pool is enabled.
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? (descriptor->CalleeSavedRegisters() & ~pp.bit())
                            : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // Save callee-saved registers.
    __ stm(db_w, sp, saves);
    frame()->AllocateSavedCalleeRegisterSlots(
        base::bits::CountPopulation32(saves));
  }
}
1135
1136
// Assembles the function epilogue: restores callee-saved registers (in the
// reverse order of AssemblePrologue), tears down the frame if one was built,
// and returns, popping the stack parameters.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore registers.
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? (descriptor->CalleeSavedRegisters() & ~pp.bit())
                            : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ ldm(ia_w, sp, saves);
  }

  // Restore FP registers.
  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32);
    // NOTE(review): 'clz - 1' equals the index of the highest set bit only
    // when clz == 16, i.e. when the top saved register is d15 — true for the
    // ARM callee-saved set d8-d15; mirrors the computation in the prologue.
    uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1;
    uint32_t first = base::bits::CountTrailingZeros32(saves_fp);
    __ vldm(ia_w, sp, DwVfpRegister::from_code(first),
            DwVfpRegister::from_code(last));
  }

  if (descriptor->IsCFunctionCall()) {
    __ LeaveFrame(StackFrame::MANUAL);
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now: every return branches to
    // a single shared epilogue, emitted on first use.
    if (return_label_.is_bound()) {
      __ b(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      __ LeaveFrame(StackFrame::MANUAL);
    }
  }
  __ Ret(pop_count);
}
1173
1174
// Assembles a move between two instruction operands: general-purpose
// registers, stack slots, constants, FP registers and FP stack slots.
// Memory-to-memory transfers go through kScratchReg / kScratchDoubleReg.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  ArmOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(g.ToRegister(destination), src);
    } else {
      __ str(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ ldr(g.ToRegister(destination), src);
    } else {
      // Slot-to-slot: bounce through the scratch register.
      Register temp = kScratchReg;
      __ ldr(temp, src);
      __ str(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize the constant into a register first; when the destination
      // is a stack slot, build it in the scratch register and spill below.
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          __ mov(dst, Operand(src.ToInt32()));
          break;
        case Constant::kInt64:
          // No 64-bit integer constants on 32-bit ARM.
          UNREACHABLE();
          break;
        case Constant::kFloat32:
          // Float constants destined for a GP register/slot are boxed as
          // (tenured) heap numbers.
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          // Prefer cheaper materializations (frame slot or root list) over
          // embedding the object when possible.
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int offset;
          if (IsMaterializableFromFrame(src_object, &offset)) {
            __ ldr(dst, MemOperand(fp, offset));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on arm.
          break;
      }
      // Spill to the stack slot if that was the real destination.
      if (destination->IsStackSlot()) __ str(dst, g.ToMemOperand(destination));
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsDoubleStackSlot()) {
        // Store the raw 32-bit float bits via a GP register.
        MemOperand dst = g.ToMemOperand(destination);
        __ mov(ip, Operand(bit_cast<int32_t>(src.ToFloat32())));
        __ str(ip, dst);
      } else {
        SwVfpRegister dst = g.ToFloat32Register(destination);
        __ vmov(dst, src.ToFloat32());
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      // Build the double in a register (scratch if the destination is a
      // slot), then spill if needed.
      DwVfpRegister dst = destination->IsDoubleRegister()
                              ? g.ToFloat64Register(destination)
                              : kScratchDoubleReg;
      __ vmov(dst, src.ToFloat64(), kScratchReg);
      if (destination->IsDoubleStackSlot()) {
        __ vstr(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsDoubleRegister()) {
    DwVfpRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DwVfpRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ vstr(src, g.ToMemOperand(destination));
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ vldr(g.ToDoubleRegister(destination), src);
    } else {
      // Double-slot to double-slot: bounce through the FP scratch register.
      DwVfpRegister temp = kScratchDoubleReg;
      __ vldr(temp, src);
      __ vstr(temp, g.ToMemOperand(destination));
    }
  } else {
    UNREACHABLE();
  }
}
1281
1282
// Assembles a swap of two instruction operands, using kScratchReg and
// kScratchDoubleReg as temporaries. Both operands must be of the same width
// class (GP vs. double).
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  ArmOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      // Register-slot: save the register, load the slot into it, store the
      // saved value back to the slot.
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mov(temp, src);
      __ ldr(src, dst);
      __ str(temp, dst);
    }
  } else if (source->IsStackSlot()) {
    // Slot-slot: hold one value in the GP scratch and the other in the low
    // half of the FP scratch so only two temporaries are needed.
    DCHECK(destination->IsStackSlot());
    Register temp_0 = kScratchReg;
    SwVfpRegister temp_1 = kScratchDoubleReg.low();
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ ldr(temp_0, src);
    __ vldr(temp_1, dst);
    __ str(temp_0, dst);
    __ vstr(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    DwVfpRegister temp = kScratchDoubleReg;
    DwVfpRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DwVfpRegister dst = g.ToDoubleRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      // Double-register-slot swap, analogous to the GP case above.
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ Move(temp, src);
      __ vldr(src, dst);
      __ vstr(temp, dst);
    }
  } else if (source->IsDoubleStackSlot()) {
    // Double-slot to double-slot: keep the destination in the FP scratch
    // while copying the source word-by-word (two 32-bit halves) through the
    // GP scratch, then write the saved destination over the source.
    DCHECK(destination->IsDoubleStackSlot());
    Register temp_0 = kScratchReg;
    DwVfpRegister temp_1 = kScratchDoubleReg;
    MemOperand src0 = g.ToMemOperand(source);
    MemOperand src1(src0.rn(), src0.offset() + kPointerSize);
    MemOperand dst0 = g.ToMemOperand(destination);
    MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize);
    __ vldr(temp_1, dst0);  // Save destination in temp_1.
    __ ldr(temp_0, src0);   // Then use temp_0 to copy source to destination.
    __ str(temp_0, dst0);
    __ ldr(temp_0, src1);
    __ str(temp_0, dst1);
    __ vstr(temp_1, src0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
1348
1349
// Out-of-line jump table emission hook. Never reached on 32-bit ARM, where
// AssembleArchTableSwitch emits its jump tables inline instead.
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 32-bit ARM we emit the jump tables inline.
  UNREACHABLE();
}
1354
1355
// Platform hook for padding inlined Smi code; intentionally a no-op here.
void CodeGenerator::AddNopForSmiCodeInlining() {
  // On 32-bit ARM we do not insert nops for inlined Smi code.
}
1359
1360
// Pads the instruction stream with nops so that there are at least
// Deoptimizer::patch_size() bytes after the previous lazy-bailout point,
// allowing the deoptimizer to later overwrite that region with a call
// without clobbering unrelated instructions.
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block literal pool emission for duration of padding, so the pool
    // cannot be dumped into the middle of the patchable region.
    v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    // Padding must be a whole number of ARM instructions.
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}
1381
1382#undef __
1383
1384} // namespace compiler
1385} // namespace internal
1386} // namespace v8