// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/arm/macro-assembler-arm.h"
#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->


#define kScratchReg r9


// Adds Arm-specific methods to convert InstructionOperands.
class ArmOperandConverter final : public InstructionOperandConverter {
 public:
  ArmOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  SwVfpRegister OutputFloat32Register(size_t index = 0) {
    return ToFloat32Register(instr_->OutputAt(index));
  }

  SwVfpRegister InputFloat32Register(size_t index) {
    return ToFloat32Register(instr_->InputAt(index));
  }

  SwVfpRegister ToFloat32Register(InstructionOperand* op) {
    return ToFloat64Register(op).low();
  }

  LowDwVfpRegister OutputFloat64Register(size_t index = 0) {
    return ToFloat64Register(instr_->OutputAt(index));
  }

  LowDwVfpRegister InputFloat64Register(size_t index) {
    return ToFloat64Register(instr_->InputAt(index));
  }

  LowDwVfpRegister ToFloat64Register(InstructionOperand* op) {
    return LowDwVfpRegister::from_code(ToDoubleRegister(op).code());
  }

  SBit OutputSBit() const {
    switch (instr_->flags_mode()) {
      case kFlags_branch:
      case kFlags_set:
        return SetCC;
      case kFlags_none:
        return LeaveCC;
    }
    UNREACHABLE();
    return LeaveCC;
  }

  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
      case Constant::kExternalReference:
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  Operand InputOperand2(size_t first_index) {
    const size_t index = first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Offset_RI:
      case kMode_Offset_RR:
        break;
      case kMode_Operand2_I:
        return InputImmediate(index + 0);
      case kMode_Operand2_R:
        return Operand(InputRegister(index + 0));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister(index + 0), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_R:
        return Operand(InputRegister(index + 0), ASR, InputRegister(index + 1));
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister(index + 0), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSL_R:
        return Operand(InputRegister(index + 0), LSL, InputRegister(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister(index + 0), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_R:
        return Operand(InputRegister(index + 0), LSR, InputRegister(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister(index + 0), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_R:
        return Operand(InputRegister(index + 0), ROR, InputRegister(index + 1));
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  MemOperand InputOffset(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_I:
      case kMode_Operand2_R:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ASR_R:
      case kMode_Operand2_R_LSL_I:
      case kMode_Operand2_R_LSL_R:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_LSR_R:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_ROR_R:
        break;
      case kMode_Offset_RI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_Offset_RR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(r0);
  }

  MemOperand InputOffset(size_t first_index = 0) {
    return InputOffset(&first_index);
  }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};

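// Typical use inside AssembleArchInstruction below: construct a converter for
// the current instruction and pull operands out of it, e.g.
//
//   ArmOperandConverter i(this, instr);
//   __ add(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
//          i.OutputSBit());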

namespace {

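// Out-of-line code emitted when a checked load fails its bounds check (see the
// ASSEMBLE_CHECKED_LOAD_* macros below): the result register is filled with a
// quiet NaN for floating-point loads or with zero for integer loads.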
class OutOfLineLoadFloat32 final : public OutOfLineCode {
 public:
  OutOfLineLoadFloat32(CodeGenerator* gen, SwVfpRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ vmov(result_, std::numeric_limits<float>::quiet_NaN());
  }

 private:
  SwVfpRegister const result_;
};


class OutOfLineLoadFloat64 final : public OutOfLineCode {
 public:
  OutOfLineLoadFloat64(CodeGenerator* gen, DwVfpRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ vmov(result_, std::numeric_limits<double>::quiet_NaN(), kScratchReg);
  }

 private:
  DwVfpRegister const result_;
};


class OutOfLineLoadInteger final : public OutOfLineCode {
 public:
  OutOfLineLoadInteger(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ mov(result_, Operand::Zero()); }

 private:
  Register const result_;
};


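// Out-of-line portion of the write barrier used by kArchStoreWithWriteBarrier:
// it skips Smi values where possible, checks the relevant page flag on the
// stored value, and calls the RecordWriteStub, saving and restoring lr
// manually when the frame has been elided.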
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        index_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(no_reg),
        index_immediate_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (!frame()->needs_frame()) {
      // We need to save and restore lr if the frame was elided.
      __ Push(lr);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    if (index_.is(no_reg)) {
      __ add(scratch1_, object_, Operand(index_immediate_));
    } else {
      DCHECK_EQ(0, index_immediate_);
      __ add(scratch1_, object_, Operand(index_));
    }
    __ CallStub(&stub);
    if (!frame()->needs_frame()) {
      __ Pop(lr);
    }
  }

 private:
  Register const object_;
  Register const index_;
  int32_t const index_immediate_;  // Valid if index_.is(no_reg).
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};


Condition FlagsConditionToCondition(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    case kFloatLessThanOrUnordered:
      return lt;
    case kFloatGreaterThanOrEqual:
      return ge;
    case kFloatLessThanOrEqual:
      return ls;
    case kFloatGreaterThanOrUnordered:
      return hi;
    case kFloatLessThan:
      return lo;
    case kFloatGreaterThanOrEqualOrUnordered:
      return hs;
    case kFloatLessThanOrEqualOrUnordered:
      return le;
    case kFloatGreaterThan:
      return gt;
    case kOverflow:
      return vs;
    case kNotOverflow:
      return vc;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}

}  // namespace


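// The checked load/store macros below compare the offset against the length
// operand and either branch to an out-of-line handler (loads) or predicate the
// store on the 'lo' condition, so out-of-bounds accesses are never performed.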
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                           \
  do {                                                               \
    auto result = i.OutputFloat##width##Register();                  \
    auto offset = i.InputRegister(0);                                \
    if (instr->InputAt(1)->IsRegister()) {                           \
      __ cmp(offset, i.InputRegister(1));                            \
    } else {                                                         \
      __ cmp(offset, i.InputImmediate(1));                           \
    }                                                                \
    auto ool = new (zone()) OutOfLineLoadFloat##width(this, result); \
    __ b(hs, ool->entry());                                          \
    __ vldr(result, i.InputOffset(2));                               \
    __ bind(ool->exit());                                            \
    DCHECK_EQ(LeaveCC, i.OutputSBit());                              \
  } while (0)


#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                \
  do {                                                          \
    auto result = i.OutputRegister();                           \
    auto offset = i.InputRegister(0);                           \
    if (instr->InputAt(1)->IsRegister()) {                      \
      __ cmp(offset, i.InputRegister(1));                       \
    } else {                                                    \
      __ cmp(offset, i.InputImmediate(1));                      \
    }                                                           \
    auto ool = new (zone()) OutOfLineLoadInteger(this, result); \
    __ b(hs, ool->entry());                                     \
    __ asm_instr(result, i.InputOffset(2));                     \
    __ bind(ool->exit());                                       \
    DCHECK_EQ(LeaveCC, i.OutputSBit());                         \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_FLOAT(width)        \
  do {                                             \
    auto offset = i.InputRegister(0);              \
    if (instr->InputAt(1)->IsRegister()) {         \
      __ cmp(offset, i.InputRegister(1));          \
    } else {                                       \
      __ cmp(offset, i.InputImmediate(1));         \
    }                                              \
    auto value = i.InputFloat##width##Register(2); \
    __ vstr(value, i.InputOffset(3), lo);          \
    DCHECK_EQ(LeaveCC, i.OutputSBit());            \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \
  do {                                            \
    auto offset = i.InputRegister(0);             \
    if (instr->InputAt(1)->IsRegister()) {        \
      __ cmp(offset, i.InputRegister(1));         \
    } else {                                      \
      __ cmp(offset, i.InputImmediate(1));        \
    }                                             \
    auto value = i.InputRegister(2);              \
    __ asm_instr(value, i.InputOffset(3), lo);    \
    DCHECK_EQ(LeaveCC, i.OutputSBit());           \
  } while (0)


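// Tail-call support: AssembleDeconstructActivationRecord drops any stack slots
// pushed for outgoing parameters, while AssemblePrepareTailCall reloads the
// caller's lr and fp (and the constant pool pointer when embedded constant
// pools are enabled) and switches frame accesses to be SP-relative.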
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ add(sp, sp, Operand(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ sub(sp, sp, Operand(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame()->needs_frame()) {
    if (FLAG_enable_embedded_constant_pool) {
      __ ldr(cp, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
    }
    __ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  frame_access_state()->SetFrameAccessToSP();
}


// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  ArmOperandConverter i(this, instr);

  masm()->MaybeCheckConstPool();

  switch (ArchOpcodeField::decode(instr->opcode())) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (instr->InputAt(0)->IsImmediate()) {
        __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ add(ip, i.InputRegister(0),
               Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Call(ip);
      }
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (instr->InputAt(0)->IsImmediate()) {
        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ add(ip, i.InputRegister(0),
               Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Jump(ip);
      }
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, kScratchReg);
        __ Assert(eq, kWrongFunctionContext);
      }
      __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Call(ip);
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ ldr(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, kScratchReg);
        __ Assert(eq, kWrongFunctionContext);
      }
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      __ ldr(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Jump(ip);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      break;
    }
    case kArchRet:
      AssembleReturn();
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchStackPointer:
      __ mov(i.OutputRegister(), sp);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchFramePointer:
      __ mov(i.OutputRegister(), fp);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->frame()->needs_frame()) {
        __ ldr(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mov(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      OutOfLineRecordWrite* ool;

      AddressingMode addressing_mode =
          AddressingModeField::decode(instr->opcode());
      if (addressing_mode == kMode_Offset_RI) {
        int32_t index = i.InputInt32(1);
        ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
                                                scratch0, scratch1, mode);
        __ str(value, MemOperand(object, index));
      } else {
        DCHECK_EQ(kMode_Offset_RR, addressing_mode);
        Register index(i.InputRegister(1));
        ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
                                                scratch0, scratch1, mode);
        __ str(value, MemOperand(object, index));
      }
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      Register base;
      if (offset.from_stack_pointer()) {
        base = sp;
      } else {
        base = fp;
      }
      __ add(i.OutputRegister(0), base, Operand(offset.offset()));
      break;
    }
    case kArmAdd:
      __ add(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmAnd:
      __ and_(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
              i.OutputSBit());
      break;
    case kArmBic:
      __ bic(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmMul:
      __ mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.OutputSBit());
      break;
    case kArmMla:
      __ mla(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.InputRegister(2), i.OutputSBit());
      break;
    case kArmMls: {
      CpuFeatureScope scope(masm(), MLS);
      __ mls(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.InputRegister(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmSmmul:
      __ smmul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmSmmla:
      __ smmla(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.InputRegister(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmUmull:
      __ umull(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
               i.InputRegister(1), i.OutputSBit());
      break;
    case kArmSdiv: {
      CpuFeatureScope scope(masm(), SUDIV);
      __ sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmUdiv: {
      CpuFeatureScope scope(masm(), SUDIV);
      __ udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmMov:
      __ Move(i.OutputRegister(), i.InputOperand2(0), i.OutputSBit());
      break;
    case kArmMvn:
      __ mvn(i.OutputRegister(), i.InputOperand2(0), i.OutputSBit());
      break;
    case kArmOrr:
      __ orr(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmEor:
      __ eor(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmSub:
      __ sub(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmRsb:
      __ rsb(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
             i.OutputSBit());
      break;
    case kArmBfc: {
      CpuFeatureScope scope(masm(), ARMv7);
      __ bfc(i.OutputRegister(), i.InputInt8(1), i.InputInt8(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmUbfx: {
      CpuFeatureScope scope(masm(), ARMv7);
      __ ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
              i.InputInt8(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmSbfx: {
      CpuFeatureScope scope(masm(), ARMv7);
      __ sbfx(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
              i.InputInt8(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmSxtb:
      __ sxtb(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmSxth:
      __ sxth(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmSxtab:
      __ sxtab(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.InputInt32(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmSxtah:
      __ sxtah(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.InputInt32(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmUxtb:
      __ uxtb(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmUxth:
      __ uxth(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmUxtab:
      __ uxtab(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.InputInt32(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmUxtah:
      __ uxtah(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.InputInt32(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmRbit: {
      CpuFeatureScope scope(masm(), ARMv7);
      __ rbit(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmClz:
      __ clz(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmCmp:
      __ cmp(i.InputRegister(0), i.InputOperand2(1));
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmCmn:
      __ cmn(i.InputRegister(0), i.InputOperand2(1));
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmTst:
      __ tst(i.InputRegister(0), i.InputOperand2(1));
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmTeq:
      __ teq(i.InputRegister(0), i.InputOperand2(1));
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmVcmpF32:
      if (instr->InputAt(1)->IsDoubleRegister()) {
        __ VFPCompareAndSetFlags(i.InputFloat32Register(0),
                                 i.InputFloat32Register(1));
      } else {
        DCHECK(instr->InputAt(1)->IsImmediate());
        // 0.0 is the only immediate supported by vcmp instructions.
        DCHECK(i.InputFloat32(1) == 0.0f);
        __ VFPCompareAndSetFlags(i.InputFloat32Register(0), i.InputFloat32(1));
      }
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmVaddF32:
      __ vadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVsubF32:
      __ vsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmulF32:
      __ vmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmlaF32:
      __ vmla(i.OutputFloat32Register(), i.InputFloat32Register(1),
              i.InputFloat32Register(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmlsF32:
      __ vmls(i.OutputFloat32Register(), i.InputFloat32Register(1),
              i.InputFloat32Register(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVdivF32:
      __ vdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVsqrtF32:
      __ vsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVabsF32:
      __ vabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVnegF32:
      __ vneg(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVcmpF64:
      if (instr->InputAt(1)->IsDoubleRegister()) {
        __ VFPCompareAndSetFlags(i.InputFloat64Register(0),
                                 i.InputFloat64Register(1));
      } else {
        DCHECK(instr->InputAt(1)->IsImmediate());
        // 0.0 is the only immediate supported by vcmp instructions.
        DCHECK(i.InputDouble(1) == 0.0);
        __ VFPCompareAndSetFlags(i.InputFloat64Register(0), i.InputDouble(1));
      }
      DCHECK_EQ(SetCC, i.OutputSBit());
      break;
    case kArmVaddF64:
      __ vadd(i.OutputFloat64Register(), i.InputFloat64Register(0),
              i.InputFloat64Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVsubF64:
      __ vsub(i.OutputFloat64Register(), i.InputFloat64Register(0),
              i.InputFloat64Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmulF64:
      __ vmul(i.OutputFloat64Register(), i.InputFloat64Register(0),
              i.InputFloat64Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmlaF64:
      __ vmla(i.OutputFloat64Register(), i.InputFloat64Register(1),
              i.InputFloat64Register(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmlsF64:
      __ vmls(i.OutputFloat64Register(), i.InputFloat64Register(1),
              i.InputFloat64Register(2));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVdivF64:
      __ vdiv(i.OutputFloat64Register(), i.InputFloat64Register(0),
              i.InputFloat64Register(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmodF64: {
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      FrameScope scope(masm(), StackFrame::MANUAL);
      __ PrepareCallCFunction(0, 2, kScratchReg);
      __ MovToFloatParameters(i.InputFloat64Register(0),
                              i.InputFloat64Register(1));
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      // Move the result in the double result register.
      __ MovFromFloatResult(i.OutputFloat64Register());
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVsqrtF64:
      __ vsqrt(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVabsF64:
      __ vabs(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVnegF64:
      __ vneg(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVrintmF32:
      __ vrintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVrintmF64:
      __ vrintm(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVrintpF32:
      __ vrintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVrintpF64:
      __ vrintp(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVrintzF32:
      __ vrintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVrintzF64:
      __ vrintz(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVrintaF64:
      __ vrinta(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVrintnF32:
      __ vrintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArmVrintnF64:
      __ vrintn(i.OutputFloat64Register(), i.InputFloat64Register(0));
      break;
    case kArmVcvtF32F64: {
      __ vcvt_f32_f64(i.OutputFloat32Register(), i.InputFloat64Register(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtF64F32: {
      __ vcvt_f64_f32(i.OutputFloat64Register(), i.InputFloat32Register(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtF32S32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vmov(scratch, i.InputRegister(0));
      __ vcvt_f32_s32(i.OutputFloat32Register(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtF32U32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vmov(scratch, i.InputRegister(0));
      __ vcvt_f32_u32(i.OutputFloat32Register(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtF64S32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vmov(scratch, i.InputRegister(0));
      __ vcvt_f64_s32(i.OutputFloat64Register(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtF64U32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vmov(scratch, i.InputRegister(0));
      __ vcvt_f64_u32(i.OutputFloat64Register(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtS32F32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vcvt_s32_f32(scratch, i.InputFloat32Register(0));
      __ vmov(i.OutputRegister(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtU32F32: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vcvt_u32_f32(scratch, i.InputFloat32Register(0));
      __ vmov(i.OutputRegister(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtS32F64: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vcvt_s32_f64(scratch, i.InputFloat64Register(0));
      __ vmov(i.OutputRegister(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVcvtU32F64: {
      SwVfpRegister scratch = kScratchDoubleReg.low();
      __ vcvt_u32_f64(scratch, i.InputFloat64Register(0));
      __ vmov(i.OutputRegister(), scratch);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVmovLowU32F64:
      __ VmovLow(i.OutputRegister(), i.InputFloat64Register(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmovLowF64U32:
      __ VmovLow(i.OutputFloat64Register(), i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmovHighU32F64:
      __ VmovHigh(i.OutputRegister(), i.InputFloat64Register(0));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmovHighF64U32:
      __ VmovHigh(i.OutputFloat64Register(), i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVmovF64U32U32:
      __ vmov(i.OutputFloat64Register(), i.InputRegister(0),
              i.InputRegister(1));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmLdrb:
      __ ldrb(i.OutputRegister(), i.InputOffset());
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmLdrsb:
      __ ldrsb(i.OutputRegister(), i.InputOffset());
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmStrb: {
      size_t index = 0;
      MemOperand operand = i.InputOffset(&index);
      __ strb(i.InputRegister(index), operand);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmLdrh:
      __ ldrh(i.OutputRegister(), i.InputOffset());
      break;
    case kArmLdrsh:
      __ ldrsh(i.OutputRegister(), i.InputOffset());
      break;
    case kArmStrh: {
      size_t index = 0;
      MemOperand operand = i.InputOffset(&index);
      __ strh(i.InputRegister(index), operand);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmLdr:
      __ ldr(i.OutputRegister(), i.InputOffset());
      break;
    case kArmStr: {
      size_t index = 0;
      MemOperand operand = i.InputOffset(&index);
      __ str(i.InputRegister(index), operand);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVldrF32: {
      __ vldr(i.OutputFloat32Register(), i.InputOffset());
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVstrF32: {
      size_t index = 0;
      MemOperand operand = i.InputOffset(&index);
      __ vstr(i.InputFloat32Register(index), operand);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmVldrF64:
      __ vldr(i.OutputFloat64Register(), i.InputOffset());
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmVstrF64: {
      size_t index = 0;
      MemOperand operand = i.InputOffset(&index);
      __ vstr(i.InputFloat64Register(index), operand);
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kArmPush:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ vpush(i.InputDoubleRegister(0));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ push(i.InputRegister(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    case kArmPoke: {
      int const slot = MiscField::decode(instr->opcode());
      __ str(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
      DCHECK_EQ(LeaveCC, i.OutputSBit());
      break;
    }
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsb);
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(ldrb);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsh);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(ldrh);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(ldr);
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(32);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(64);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(strb);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(strh);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(str);
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT(32);
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_FLOAT(64);
      break;
    case kCheckedLoadWord64:
    case kCheckedStoreWord64:
      UNREACHABLE();  // currently unsupported checked int64 load/store.
      break;
  }
}  // NOLINT(readability/fn_size)


// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  ArmOperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  Condition cc = FlagsConditionToCondition(branch->condition);
  __ b(cc, tlabel);
  if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  ArmOperandConverter i(this, instr);

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = FlagsConditionToCondition(condition);
  __ mov(reg, Operand(0));
  __ mov(reg, Operand(1), LeaveCC, cc);
}


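// Lookup switches are emitted as a linear sequence of compare-and-branch
// pairs, ending with a jump to the default block (input 1).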
void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  ArmOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ cmp(input, Operand(i.InputInt32(index + 0)));
    __ b(eq, GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}


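// Table switches are emitted inline: after a bounds check, a pc-relative
// computed branch dispatches into a table of unconditional branches; inputs
// that are out of range fall through to the branch to the default block.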
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  ArmOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  size_t const case_count = instr->InputCount() - 2;
  // Ensure to emit the constant pool first if necessary.
  __ CheckConstPool(true, true);
  __ cmp(input, Operand(case_count));
  __ BlockConstPoolFor(case_count + 2);
  __ add(pc, pc, Operand(input, LSL, 2), LeaveCC, lo);
  __ b(GetLabel(i.InputRpo(1)));
  for (size_t index = 0; index < case_count; ++index) {
    __ b(GetLabel(i.InputRpo(index + 2)));
  }
}


void CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}


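// Frame construction depends on the incoming call descriptor: C function calls
// push lr and fp (plus pp with embedded constant pools), JS function calls use
// the standard prologue, stubs use StubPrologue, and otherwise the frame is
// elided; spill slots and callee-saved (FP) registers are then allocated.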
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall()) {
    if (FLAG_enable_embedded_constant_pool) {
      __ Push(lr, fp, pp);
      // Adjust FP to point to saved FP.
      __ sub(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
    } else {
      __ Push(lr, fp);
      __ mov(fp, sp);
    }
  } else if (descriptor->IsJSFunctionCall()) {
    __ Prologue(this->info()->GeneratePreagedPrologue());
  } else if (frame()->needs_frame()) {
    __ StubPrologue();
  } else {
    frame()->SetElidedFrameSizeInSlots(0);
  }
  frame_access_state()->SetFrameAccessToDefault();

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    __ sub(sp, sp, Operand(stack_shrink_slots * kPointerSize));
  }

  if (saves_fp != 0) {
    // Save callee-saved FP registers.
    STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32);
    uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1;
    uint32_t first = base::bits::CountTrailingZeros32(saves_fp);
    DCHECK_EQ((last - first + 1), base::bits::CountPopulation32(saves_fp));
    __ vstm(db_w, sp, DwVfpRegister::from_code(first),
            DwVfpRegister::from_code(last));
    frame()->AllocateSavedCalleeRegisterSlots((last - first + 1) *
                                              (kDoubleSize / kPointerSize));
  }
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? (descriptor->CalleeSavedRegisters() & ~pp.bit())
                            : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // Save callee-saved registers.
    __ stm(db_w, sp, saves);
    frame()->AllocateSavedCalleeRegisterSlots(
        base::bits::CountPopulation32(saves));
  }
}


void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore registers.
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? (descriptor->CalleeSavedRegisters() & ~pp.bit())
                            : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ ldm(ia_w, sp, saves);
  }

  // Restore FP registers.
  const RegList saves_fp = descriptor->CalleeSavedFPRegisters();
  if (saves_fp != 0) {
    STATIC_ASSERT(DwVfpRegister::kMaxNumRegisters == 32);
    uint32_t last = base::bits::CountLeadingZeros32(saves_fp) - 1;
    uint32_t first = base::bits::CountTrailingZeros32(saves_fp);
    __ vldm(ia_w, sp, DwVfpRegister::from_code(first),
            DwVfpRegister::from_code(last));
  }

  if (descriptor->IsCFunctionCall()) {
    __ LeaveFrame(StackFrame::MANUAL);
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ b(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      __ LeaveFrame(StackFrame::MANUAL);
    }
  }
  __ Ret(pop_count);
}


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  ArmOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(g.ToRegister(destination), src);
    } else {
      __ str(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ ldr(g.ToRegister(destination), src);
    } else {
      Register temp = kScratchReg;
      __ ldr(temp, src);
      __ str(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          __ mov(dst, Operand(src.ToInt32()));
          break;
        case Constant::kInt64:
          UNREACHABLE();
          break;
        case Constant::kFloat32:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int offset;
          if (IsMaterializableFromFrame(src_object, &offset)) {
            __ ldr(dst, MemOperand(fp, offset));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on arm.
          break;
      }
      if (destination->IsStackSlot()) __ str(dst, g.ToMemOperand(destination));
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsDoubleStackSlot()) {
        MemOperand dst = g.ToMemOperand(destination);
        __ mov(ip, Operand(bit_cast<int32_t>(src.ToFloat32())));
        __ str(ip, dst);
      } else {
        SwVfpRegister dst = g.ToFloat32Register(destination);
        __ vmov(dst, src.ToFloat32());
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      DwVfpRegister dst = destination->IsDoubleRegister()
                              ? g.ToFloat64Register(destination)
                              : kScratchDoubleReg;
      __ vmov(dst, src.ToFloat64(), kScratchReg);
      if (destination->IsDoubleStackSlot()) {
        __ vstr(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsDoubleRegister()) {
    DwVfpRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DwVfpRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ vstr(src, g.ToMemOperand(destination));
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ vldr(g.ToDoubleRegister(destination), src);
    } else {
      DwVfpRegister temp = kScratchDoubleReg;
      __ vldr(temp, src);
      __ vstr(temp, g.ToMemOperand(destination));
    }
  } else {
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  ArmOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mov(temp, src);
      __ ldr(src, dst);
      __ str(temp, dst);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
    Register temp_0 = kScratchReg;
    SwVfpRegister temp_1 = kScratchDoubleReg.low();
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ ldr(temp_0, src);
    __ vldr(temp_1, dst);
    __ str(temp_0, dst);
    __ vstr(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    DwVfpRegister temp = kScratchDoubleReg;
    DwVfpRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DwVfpRegister dst = g.ToDoubleRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ Move(temp, src);
      __ vldr(src, dst);
      __ vstr(temp, dst);
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    Register temp_0 = kScratchReg;
    DwVfpRegister temp_1 = kScratchDoubleReg;
    MemOperand src0 = g.ToMemOperand(source);
    MemOperand src1(src0.rn(), src0.offset() + kPointerSize);
    MemOperand dst0 = g.ToMemOperand(destination);
    MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize);
    __ vldr(temp_1, dst0);  // Save destination in temp_1.
    __ ldr(temp_0, src0);   // Then use temp_0 to copy source to destination.
    __ str(temp_0, dst0);
    __ ldr(temp_0, src1);
    __ str(temp_0, dst1);
    __ vstr(temp_1, src0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 32-bit ARM we emit the jump tables inline.
  UNREACHABLE();
}


void CodeGenerator::AddNopForSmiCodeInlining() {
  // On 32-bit ARM we do not insert nops for inlined Smi code.
}


void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block literal pool emission for duration of padding.
    v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8