/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

// TODO: clean-up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {

bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

bool Is64BitType(Primitive::Type type) {
  return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
}

// Convenience helpers to ease conversion to and from VIXL operands.

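// ART numbers SP as 31 and XZR as 32 (see the DCHECKs below), whereas VIXL
// gives the zero register its own code and reserves an internal code for SP,
// so register codes must be remapped in both directions.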
int VIXLRegCodeFromART(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

FPRegister DRegisterFrom(Location location) {
  return FPRegister::DRegFromCode(location.reg());
}

FPRegister SRegisterFrom(Location location) {
  return FPRegister::SRegFromCode(location.reg());
}

FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
  DCHECK(IsFPType(type));
  return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
}

FPRegister OutputFPRegister(HInstruction* instr) {
  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
  return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                        instr->InputAt(input_index)->GetType());
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  // A heap reference must be 32bit, so fit in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
  return HeapOperand(RegisterFrom(location, type), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

Location LocationFrom(const FPRegister& fpreg) {
  return Location::FpuRegisterLocation(fpreg.code());
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                                    Location index_location,
                                    Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
                              index_location_, Primitive::kPrimInt);
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
                              length_location_, Primitive::kPrimInt);
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
    __ Bind(GetEntryLabel());
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    __ B(GetReturnLabel());
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->

void CodeGeneratorARM64::GenerateFrameEntry() {
  // TODO: Add proper support for the stack overflow check.
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register temp = temps.AcquireX();
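  // Probe the stack below `sp` by the reserved amount: if the method would
  // overflow the stack, the load faults and the runtime reports it as a
  // StackOverflowError.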
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  // sp[frame_size - 8]        : lr.
  // ...                       : other preserved registers.
  // sp[frame_size - regs_size]: first preserved register.
  // ...                       : reserved frame space.
  // sp[0]                     : context pointer.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveHelper(Location destination,
                                    Location source,
                                    Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    Register dst = RegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else {
      __ Mov(dst, OperandFrom(source, type));
    }
  } else if (destination.IsFpuRegister()) {
    FPRegister dst = FPRegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Fmov(dst, FPRegisterFrom(source, type));
    } else {
      HConstant* cst = source.GetConstant();
      if (cst->IsFloatConstant()) {
        __ Fmov(dst, cst->AsFloatConstant()->GetValue());
      } else {
        DCHECK(cst->IsDoubleConstant());
        __ Fmov(dst, cst->AsDoubleConstant()->GetValue());
      }
    }
  } else {
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsFpuRegister()) {
      __ Str(FPRegisterFrom(source, type), StackOperandFrom(destination));
    } else {
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveHelper(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveHelper(location, Location::StackSlot(stack_slot), type);
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveHelper(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

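// Write barrier: after a non-null reference `value` has been stored into
// `object`, mark the card covering `object` dirty so the garbage collector
// rescans that object.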
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register card = temps.AcquireX();
  Register temp = temps.AcquireX();
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   xSuspend (Suspend counter)
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

#undef __
#define __ assembler_->vixl_masm_->

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(ArrayGet) \
  M(ArraySet) \
  M(ClinitCheck) \
  M(Div) \
  M(DivZeroCheck) \
  M(InvokeInterface) \
  M(LoadClass) \
  M(LoadException) \
  M(LoadString) \
  M(Neg) \
  M(NewArray) \
  M(ParallelMove) \
  M(StaticFieldGet) \
  M(StaticFieldSet) \
  M(Throw) \
  M(TypeCheck) \
  M(TypeConversion) \

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
    UNUSED(instr); \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
  } \
  void LocationsBuilderARM64::Visit##name(H##name* instr) { \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any()); \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE

void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());

  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else {
        __ Sub(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else {
        __ Fsub(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected add/sub type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  DCHECK_EQ(in_type, Primitive::kPrimLong);
  switch (in_type) {
    case Primitive::kPrimLong: {
      vixl::Label done;
      Register result = OutputRegister(instruction);
      Register left = InputRegisterAt(instruction, 0);
      Operand right = InputOperandAt(instruction, 1);
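      // result is 0 if left == right, 1 if left > right and -1 if left < right
      // (signed comparison).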
      __ Subs(result, left, right);
      __ B(eq, &done);
      __ Mov(result, 1);
      __ Cneg(result, result, le);
      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

  __ Cmp(lhs, rhs);
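  // Materialize the condition: select the zero register when the inverted
  // condition holds and 1 otherwise, so `res` is 1 when `cond` is true.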
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal) \
  M(NotEqual) \
  M(LessThan) \
  M(LessThanOrEqual) \
  M(GreaterThan) \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name) \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  UNUSED(constant);
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
    __ Brk(0);  // TODO: Introduce special markers for such code locations.
  }
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
  UNUSED(constant);
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  // TODO: Support for suspend checks emission.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition arm64_cond = ARM64Condition(condition->GetCondition());
    if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (arm64_cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(arm64_cond, true_target);
    }
  }

  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  Primitive::Type res_type = instruction->GetType();
  Register res = OutputRegister(instruction);
  Register obj = InputRegisterAt(instruction, 0);
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (res_type) {
    case Primitive::kPrimBoolean: {
      __ Ldrb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimByte: {
      __ Ldrsb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimShort: {
      __ Ldrsh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimChar: {
      __ Ldrh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {  // TODO: support volatile.
      DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
      __ Ldr(res, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register res_type " << res_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable res_type " << res_type;
  }
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Register obj = InputRegisterAt(instruction, 0);
  Register value = InputRegisterAt(instruction, 1);
  Primitive::Type field_type = instruction->InputAt(1)->GetType();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ Strb(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ Strh(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {
      DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
      __ Str(value, MemOperand(obj, offset));

      if (field_type == Primitive::kPrimNot) {
        codegen_->MarkGCCard(obj, value);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(LocationFrom(x0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  Primitive::Type return_type = invoke->GetType();
  if (return_type != Primitive::kPrimVoid) {
    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
  }
}

void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0 as per the calling convention
  DCHECK(temp.Is(w0));
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp, MemOperand(temp.X(), mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr, MemOperand(temp.X(), mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
  __ Mov(type_index, instruction->GetTypeIndex());
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocObjectWithAccessCheck).Int32Value()));
  __ Blr(lr);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // TODO: Improve support for suspend checks.
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
  codegen_->AddSlowPath(slow_path);

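  // Decrement the suspend counter kept in wSuspend; when it reaches zero (or
  // goes below), take the slow path, which calls the runtime's TestSuspend
  // entry point.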
  __ Subs(wSuspend, wSuspend, 1);
  __ B(slow_path->GetEntryLabel(), le);
  __ Bind(slow_path->GetReturnLabel());
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

}  // namespace arm64
}  // namespace art