1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/arm64/assembler_arm64.h"
26#include "utils/assembler.h"
27#include "utils/stack_checks.h"
28
29
30using namespace vixl; // NOLINT(build/namespaces)
31
32#ifdef __
33#error "ARM64 Codegen VIXL macro-assembler macro already defined."
34#endif
35
36
37namespace art {
38
39namespace arm64 {
40
41static bool IsFPType(Primitive::Type type) {
42 return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
43}
44
45// TODO: clean-up some of the constant definitions.
46static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
47static constexpr int kCurrentMethodStackOffset = 0;
48
49namespace {
50// Convenience helpers to ease conversion to and from VIXL operands.
51
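// ART numbers SP/WSP as 31 and XZR/WZR as 32 (checked by the DCHECKs below),
// whereas VIXL encodes the zero register as 31 and keeps a separate internal
// code for the stack pointer, so register codes are remapped in both directions.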
52int VIXLRegCodeFromART(int code) {
53 // TODO: static check?
54 DCHECK_EQ(SP, 31);
55 DCHECK_EQ(WSP, 31);
56 DCHECK_EQ(XZR, 32);
57 DCHECK_EQ(WZR, 32);
58 if (code == SP) {
59 return vixl::kSPRegInternalCode;
60 }
61 if (code == XZR) {
62 return vixl::kZeroRegCode;
63 }
64 return code;
65}
66
67int ARTRegCodeFromVIXL(int code) {
68 // TODO: static check?
69 DCHECK_EQ(SP, 31);
70 DCHECK_EQ(WSP, 31);
71 DCHECK_EQ(XZR, 32);
72 DCHECK_EQ(WZR, 32);
73 if (code == vixl::kSPRegInternalCode) {
74 return SP;
75 }
76 if (code == vixl::kZeroRegCode) {
77 return XZR;
78 }
79 return code;
80}
81
82Register XRegisterFrom(Location location) {
83 return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
84}
85
86Register WRegisterFrom(Location location) {
87 return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
88}
89
90Register RegisterFrom(Location location, Primitive::Type type) {
91 DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
92 return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
93}
94
95Register OutputRegister(HInstruction* instr) {
96 return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
97}
98
99Register InputRegisterAt(HInstruction* instr, int input_index) {
100 return RegisterFrom(instr->GetLocations()->InAt(input_index),
101 instr->InputAt(input_index)->GetType());
102}
103
104int64_t Int64ConstantFrom(Location location) {
105 HConstant* instr = location.GetConstant();
106 return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
107 : instr->AsLongConstant()->GetValue();
108}
109
110Operand OperandFrom(Location location, Primitive::Type type) {
111 if (location.IsRegister()) {
112 return Operand(RegisterFrom(location, type));
113 } else {
114 return Operand(Int64ConstantFrom(location));
115 }
116}
117
118Operand InputOperandAt(HInstruction* instr, int input_index) {
119 return OperandFrom(instr->GetLocations()->InAt(input_index),
120 instr->InputAt(input_index)->GetType());
121}
122
123MemOperand StackOperandFrom(Location location) {
124 return MemOperand(sp, location.GetStackIndex());
125}
126
127MemOperand HeapOperand(const Register& base, Offset offset) {
 128 // A heap reference must be 32 bits, so it fits in a W register.
129 DCHECK(base.IsW());
130 return MemOperand(base.X(), offset.SizeValue());
131}
132
133MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
134 return HeapOperand(RegisterFrom(location, type), offset);
135}
136
137Location LocationFrom(const Register& reg) {
138 return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
139}
140
141} // namespace
142
143inline Condition ARM64Condition(IfCondition cond) {
144 switch (cond) {
145 case kCondEQ: return eq;
146 case kCondNE: return ne;
147 case kCondLT: return lt;
148 case kCondLE: return le;
149 case kCondGT: return gt;
150 case kCondGE: return ge;
151 default:
152 LOG(FATAL) << "Unknown if condition";
153 }
154 return nv; // Unreachable.
155}
156
157static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
158static constexpr size_t kRuntimeParameterCoreRegistersLength =
159 arraysize(kRuntimeParameterCoreRegisters);
160static const FPRegister kRuntimeParameterFpuRegisters[] = { };
161static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
162
163class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
164 public:
165 static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
166
167 InvokeRuntimeCallingConvention()
168 : CallingConvention(kRuntimeParameterCoreRegisters,
169 kRuntimeParameterCoreRegistersLength,
170 kRuntimeParameterFpuRegisters,
171 kRuntimeParameterFpuRegistersLength) {}
172
173 Location GetReturnLocation(Primitive::Type return_type);
174
175 private:
176 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
177};
178
179Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
180 DCHECK_NE(return_type, Primitive::kPrimVoid);
181 if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
182 LOG(FATAL) << "Unimplemented return type " << return_type;
183 }
184 return LocationFrom(x0);
185}
186
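// Shorthand used by the slow paths below: `__` routes assembler mnemonics to the
// VIXL macro assembler reached through the code generator's Arm64Assembler.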
187#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->
188
189class SlowPathCodeARM64 : public SlowPathCode {
190 public:
191 SlowPathCodeARM64() : entry_label_(), exit_label_() {}
192
193 vixl::Label* GetEntryLabel() { return &entry_label_; }
194 vixl::Label* GetExitLabel() { return &exit_label_; }
195
196 private:
197 vixl::Label entry_label_;
198 vixl::Label exit_label_;
199
200 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
201};
202
203class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
204 public:
205 explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
206 Location index_location,
207 Location length_location)
208 : instruction_(instruction),
209 index_location_(index_location),
210 length_location_(length_location) {}
211
212 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
213 CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
214 __ Bind(GetEntryLabel());
215 InvokeRuntimeCallingConvention calling_convention;
216 arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
217 index_location_, Primitive::kPrimInt);
218 arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
219 length_location_, Primitive::kPrimInt);
220 size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
221 __ Ldr(lr, MemOperand(tr, offset));
222 __ Blr(lr);
223 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
224 }
225
226 private:
227 HBoundsCheck* const instruction_;
228 const Location index_location_;
229 const Location length_location_;
230
231 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
232};
233
234class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
235 public:
236 explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}
237
238 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
239 __ Bind(GetEntryLabel());
240 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
241 __ Ldr(lr, MemOperand(tr, offset));
242 __ Blr(lr);
243 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
244 }
245
246 private:
247 HNullCheck* const instruction_;
248
249 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
250};
251
252class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
253 public:
254 explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
255 HBasicBlock* successor)
256 : instruction_(instruction), successor_(successor) {}
257
258 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
259 size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
260 __ Bind(GetEntryLabel());
261 __ Ldr(lr, MemOperand(tr, offset));
262 __ Blr(lr);
263 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
264 __ B(GetReturnLabel());
265 }
266
267 vixl::Label* GetReturnLabel() {
268 DCHECK(successor_ == nullptr);
269 return &return_label_;
270 }
271
272
273 private:
274 HSuspendCheck* const instruction_;
275 // If not null, the block to branch to after the suspend check.
276 HBasicBlock* const successor_;
277
278 // If `successor_` is null, the label to branch to after the suspend check.
279 vixl::Label return_label_;
280
281 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
282};
283
284#undef __
285
286Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
287 Location next_location;
288 if (type == Primitive::kPrimVoid) {
289 LOG(FATAL) << "Unreachable type " << type;
290 }
291
292 if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
293 LOG(FATAL) << "Unimplemented type " << type;
294 }
295
296 if (gp_index_ < calling_convention.GetNumberOfRegisters()) {
297 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_));
298 if (type == Primitive::kPrimLong) {
299 // Double stack slot reserved on the stack.
300 stack_index_++;
301 }
302 } else { // Stack.
303 if (type == Primitive::kPrimLong) {
304 next_location = Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_));
305 // Double stack slot reserved on the stack.
306 stack_index_++;
307 } else {
308 next_location = Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_));
309 }
310 }
311 // Move to the next register/stack slot.
312 gp_index_++;
313 stack_index_++;
314 return next_location;
315}
316
317CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
318 : CodeGenerator(graph,
319 kNumberOfAllocatableRegisters,
320 kNumberOfAllocatableFloatingPointRegisters,
321 kNumberOfAllocatableRegisterPairs),
322 block_labels_(nullptr),
323 location_builder_(graph, this),
324 instruction_visitor_(graph, this) {}
325
326#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->
327
328void CodeGeneratorARM64::GenerateFrameEntry() {
329 // TODO: Add proper support for the stack overflow check.
330 UseScratchRegisterScope temps(assembler_.vixl_masm_);
331 Register temp = temps.AcquireX();
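  // Load from the lowest address the frame may reach; if the stack is exhausted
  // this touches the protected guard region and the fault handler is expected to
  // throw StackOverflowError (see the TODO above about a proper check).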
332 __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
333 __ Ldr(temp, MemOperand(temp, 0));
334 RecordPcInfo(nullptr, 0);
335
336 CPURegList preserved_regs = GetFramePreservedRegisters();
337 int frame_size = GetFrameSize();
338 core_spill_mask_ |= preserved_regs.list();
339
340 __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
341 __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
342
343 // Stack layout:
344 // sp[frame_size - 8] : lr.
345 // ... : other preserved registers.
346 // sp[frame_size - regs_size]: first preserved register.
347 // ... : reserved frame space.
348 // sp[0] : context pointer.
349}
350
351void CodeGeneratorARM64::GenerateFrameExit() {
352 int frame_size = GetFrameSize();
353 CPURegList preserved_regs = GetFramePreservedRegisters();
354 __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
355 __ Drop(frame_size);
356}
357
358void CodeGeneratorARM64::Bind(HBasicBlock* block) {
359 __ Bind(GetLabelOf(block));
360}
361
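// Generic move between two core (non floating-point) locations: register to
// register, register to/from a stack slot, or stack slot to stack slot through
// a scratch register.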
362void CodeGeneratorARM64::MoveHelper(Location destination,
363 Location source,
364 Primitive::Type type) {
365 if (source.Equals(destination)) {
366 return;
367 }
368 if (destination.IsRegister()) {
369 Register dst = RegisterFrom(destination, type);
370 if (source.IsRegister()) {
371 Register src = RegisterFrom(source, type);
372 DCHECK(dst.IsSameSizeAndType(src));
373 __ Mov(dst, src);
374 } else {
375 DCHECK(dst.Is64Bits() || !source.IsDoubleStackSlot());
376 __ Ldr(dst, StackOperandFrom(source));
377 }
378 } else {
379 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
380 if (source.IsRegister()) {
381 __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
382 } else {
383 UseScratchRegisterScope temps(assembler_.vixl_masm_);
384 Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
385 __ Ldr(temp, StackOperandFrom(source));
386 __ Str(temp, StackOperandFrom(destination));
387 }
388 }
389}
390
391void CodeGeneratorARM64::Move(HInstruction* instruction,
392 Location location,
393 HInstruction* move_for) {
394 LocationSummary* locations = instruction->GetLocations();
395 if (locations != nullptr && locations->Out().Equals(location)) {
396 return;
397 }
398
399 Primitive::Type type = instruction->GetType();
400
401 if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
402 int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
403 : instruction->AsLongConstant()->GetValue();
404 if (location.IsRegister()) {
405 Register dst = RegisterFrom(location, type);
406 DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
407 (instruction->IsLongConstant() && dst.Is64Bits()));
408 __ Mov(dst, value);
409 } else {
410 DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
411 UseScratchRegisterScope temps(assembler_.vixl_masm_);
412 Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
413 __ Mov(temp, value);
414 __ Str(temp, StackOperandFrom(location));
415 }
416
417 } else if (instruction->IsLoadLocal()) {
418 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
419 switch (type) {
420 case Primitive::kPrimNot:
421 case Primitive::kPrimBoolean:
422 case Primitive::kPrimByte:
423 case Primitive::kPrimChar:
424 case Primitive::kPrimShort:
425 case Primitive::kPrimInt:
426 MoveHelper(location, Location::StackSlot(stack_slot), type);
427 break;
428 case Primitive::kPrimLong:
429 MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
430 break;
431 default:
 432 LOG(FATAL) << "Unimplemented type " << type;
433 }
434
435 } else {
436 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
437 MoveHelper(location, locations->Out(), type);
438 }
439}
440
441size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
442 return GetFramePreservedRegistersSize();
443}
444
445Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
446 Primitive::Type type = load->GetType();
447 switch (type) {
448 case Primitive::kPrimNot:
449 case Primitive::kPrimBoolean:
450 case Primitive::kPrimByte:
451 case Primitive::kPrimChar:
452 case Primitive::kPrimShort:
453 case Primitive::kPrimInt:
454 return Location::StackSlot(GetStackSlot(load->GetLocal()));
455 case Primitive::kPrimLong:
456 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
457 case Primitive::kPrimFloat:
458 case Primitive::kPrimDouble:
459 LOG(FATAL) << "Unimplemented type " << type;
460 break;
461 case Primitive::kPrimVoid:
462 default:
463 LOG(FATAL) << "Unexpected type " << type;
464 }
465 LOG(FATAL) << "Unreachable";
466 return Location::NoLocation();
467}
468
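// Dirty the card covering `object` after a non-null reference `value` has been
// stored into it, so the garbage collector will re-scan the object. Null stores
// skip the card table update.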
469void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
470 UseScratchRegisterScope temps(assembler_.vixl_masm_);
471 Register card = temps.AcquireX();
472 Register temp = temps.AcquireX();
473 vixl::Label done;
474 __ Cbz(value, &done);
475 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
476 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
477 __ Strb(card, MemOperand(card, temp));
478 __ Bind(&done);
479}
480
481void CodeGeneratorARM64::SetupBlockedRegisters() const {
482 // Block reserved registers:
483 // ip0 (VIXL temporary)
484 // ip1 (VIXL temporary)
485 // xSuspend (Suspend counter)
486 // lr
487 // sp is not part of the allocatable registers, so we don't need to block it.
488 CPURegList reserved_core_registers = vixl_reserved_core_registers;
489 reserved_core_registers.Combine(runtime_reserved_core_registers);
490 // TODO: See if we should instead allow allocating but preserve those if used.
491 reserved_core_registers.Combine(quick_callee_saved_registers);
492 while (!reserved_core_registers.IsEmpty()) {
493 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
494 }
495}
496
497Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
498 if (type == Primitive::kPrimVoid) {
499 LOG(FATAL) << "Unreachable type " << type;
500 }
501
502 if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
503 LOG(FATAL) << "Unimplemented support for floating-point";
504 }
505
506 ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfXRegisters);
507 DCHECK_NE(reg, -1);
508 blocked_core_registers_[reg] = true;
509
510 if (IsFPType(type)) {
511 return Location::FpuRegisterLocation(reg);
512 } else {
513 return Location::RegisterLocation(reg);
514 }
515}
516
517void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
518 stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
519}
520
521void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
522 stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
523}
524
525#undef __
526#define __ assembler_->vixl_masm_->
527
528InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
529 CodeGeneratorARM64* codegen)
530 : HGraphVisitor(graph),
531 assembler_(codegen->GetAssembler()),
532 codegen_(codegen) {}
533
534#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
535 M(ArrayGet) \
536 M(ArraySet) \
 537 M(ClinitCheck) \
 538 M(DoubleConstant) \
 539 M(Div) \
 540 M(FloatConstant) \
 541 M(LoadClass) \
 542 M(LoadString) \
 543 M(Neg) \
544 M(NewArray) \
 545 M(ParallelMove) \
546 M(StaticFieldGet) \
547 M(StaticFieldSet) \
 548
549#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
550
551enum UnimplementedInstructionBreakCode {
552#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
553 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
554#undef ENUM_UNIMPLEMENTED_INSTRUCTION
555};
556
557#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
558 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
559 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
560 } \
561 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
562 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
563 locations->SetOut(Location::Any()); \
564 }
565 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
566#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
567
568#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
569
570void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
571 DCHECK(instr->IsAdd() || instr->IsSub());
572 DCHECK_EQ(instr->InputCount(), 2U);
573 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
574 Primitive::Type type = instr->GetResultType();
575 switch (type) {
576 case Primitive::kPrimInt:
577 case Primitive::kPrimLong: {
578 locations->SetInAt(0, Location::RequiresRegister());
579 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
580 locations->SetOut(Location::RequiresRegister());
581 break;
582 }
583 case Primitive::kPrimBoolean:
584 case Primitive::kPrimByte:
585 case Primitive::kPrimChar:
586 case Primitive::kPrimShort:
587 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
588 break;
589 default:
590 LOG(FATAL) << "Unimplemented " << instr->DebugName() << " type " << type;
591 }
592}
593
594void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
595 DCHECK(instr->IsAdd() || instr->IsSub());
596
597 Primitive::Type type = instr->GetType();
598 Register dst = OutputRegister(instr);
599 Register lhs = InputRegisterAt(instr, 0);
600 Operand rhs = InputOperandAt(instr, 1);
601
602 switch (type) {
603 case Primitive::kPrimInt:
604 case Primitive::kPrimLong:
605 if (instr->IsAdd()) {
606 __ Add(dst, lhs, rhs);
607 } else {
608 __ Sub(dst, lhs, rhs);
609 }
610 break;
611
612 case Primitive::kPrimBoolean:
613 case Primitive::kPrimByte:
614 case Primitive::kPrimChar:
615 case Primitive::kPrimShort:
616 LOG(FATAL) << "Unexpected add/sub type " << type;
617 break;
618 default:
619 LOG(FATAL) << "Unimplemented add/sub type " << type;
620 }
621}
622
623void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
624 HandleAddSub(instruction);
625}
626
627void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
628 HandleAddSub(instruction);
629}
630
631void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
632 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
633 locations->SetInAt(0, Location::RequiresRegister());
634 locations->SetOut(Location::RequiresRegister());
635}
636
637void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
638 __ Ldr(OutputRegister(instruction),
639 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
640}
641
642void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
643 LocationSummary* locations =
644 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
645 locations->SetInAt(0, Location::RequiresRegister());
646 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
647 locations->SetOut(Location::RequiresRegister());
648}
649
650void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
651 Primitive::Type in_type = instruction->InputAt(0)->GetType();
652
653 DCHECK_EQ(in_type, Primitive::kPrimLong);
654 switch (in_type) {
655 case Primitive::kPrimLong: {
656 vixl::Label done;
657 Register result = OutputRegister(instruction);
658 Register left = InputRegisterAt(instruction, 0);
659 Operand right = InputOperandAt(instruction, 1);
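      // result = 0 if left == right, 1 if left > right, -1 if left < right.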
660 __ Subs(result, left, right);
661 __ B(eq, &done);
662 __ Mov(result, 1);
663 __ Cneg(result, result, le);
664 __ Bind(&done);
665 break;
666 }
667 default:
668 LOG(FATAL) << "Unimplemented compare type " << in_type;
669 }
670}
671
672void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
673 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
674 locations->SetInAt(0, Location::RequiresRegister());
675 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
676 if (instruction->NeedsMaterialization()) {
677 locations->SetOut(Location::RequiresRegister());
678 }
679}
680
681void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
682 if (!instruction->NeedsMaterialization()) {
683 return;
684 }
685
686 LocationSummary* locations = instruction->GetLocations();
687 Register lhs = InputRegisterAt(instruction, 0);
688 Operand rhs = InputOperandAt(instruction, 1);
689 Register res = RegisterFrom(locations->Out(), instruction->GetType());
690 Condition cond = ARM64Condition(instruction->GetCondition());
691
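  // res = 1 if the condition holds, 0 otherwise: select between the zero
  // register and 1 under the inverted condition.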
692 __ Cmp(lhs, rhs);
693 __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
694}
695
696#define FOR_EACH_CONDITION_INSTRUCTION(M) \
697 M(Equal) \
698 M(NotEqual) \
699 M(LessThan) \
700 M(LessThanOrEqual) \
701 M(GreaterThan) \
702 M(GreaterThanOrEqual)
703#define DEFINE_CONDITION_VISITORS(Name) \
704void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
705void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
706FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
707#undef FOR_EACH_CONDITION_INSTRUCTION
708
709void LocationsBuilderARM64::VisitExit(HExit* exit) {
710 exit->SetLocations(nullptr);
711}
712
713void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
714 if (kIsDebugBuild) {
715 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
716 __ Brk(0); // TODO: Introduce special markers for such code locations.
717 }
718}
719
720void LocationsBuilderARM64::VisitGoto(HGoto* got) {
721 got->SetLocations(nullptr);
722}
723
724void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
725 HBasicBlock* successor = got->GetSuccessor();
 726 // TODO: Support suspend check emission.
727 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
728 __ B(codegen_->GetLabelOf(successor));
729 }
730}
731
732void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
733 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
734 HInstruction* cond = if_instr->InputAt(0);
735 DCHECK(cond->IsCondition());
736 if (cond->AsCondition()->NeedsMaterialization()) {
737 locations->SetInAt(0, Location::RequiresRegister());
738 }
739}
740
741void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
742 HInstruction* cond = if_instr->InputAt(0);
743 DCHECK(cond->IsCondition());
744 HCondition* condition = cond->AsCondition();
745 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
746 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
747
748 // TODO: Support constant condition input in VisitIf.
749
750 if (condition->NeedsMaterialization()) {
751 // The condition instruction has been materialized, compare the output to 0.
752 Location cond_val = if_instr->GetLocations()->InAt(0);
753 DCHECK(cond_val.IsRegister());
754 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
755
756 } else {
757 // The condition instruction has not been materialized, use its inputs as
758 // the comparison and its condition as the branch condition.
759 Register lhs = InputRegisterAt(condition, 0);
760 Operand rhs = InputOperandAt(condition, 1);
761 Condition cond = ARM64Condition(condition->GetCondition());
762 if ((cond == eq || cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
763 if (cond == eq) {
764 __ Cbz(lhs, true_target);
765 } else {
766 __ Cbnz(lhs, true_target);
767 }
768 } else {
769 __ Cmp(lhs, rhs);
770 __ B(cond, true_target);
771 }
772 }
773
774 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
775 __ B(false_target);
776 }
777}
778
779void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
780 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
781 locations->SetInAt(0, Location::RequiresRegister());
782 locations->SetOut(Location::RequiresRegister());
783}
784
785void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
786 Primitive::Type res_type = instruction->GetType();
787 Register res = OutputRegister(instruction);
788 Register obj = InputRegisterAt(instruction, 0);
789 uint32_t offset = instruction->GetFieldOffset().Uint32Value();
790
791 switch (res_type) {
792 case Primitive::kPrimBoolean: {
793 __ Ldrb(res, MemOperand(obj, offset));
794 break;
795 }
796 case Primitive::kPrimByte: {
797 __ Ldrsb(res, MemOperand(obj, offset));
798 break;
799 }
800 case Primitive::kPrimShort: {
801 __ Ldrsh(res, MemOperand(obj, offset));
802 break;
803 }
804 case Primitive::kPrimChar: {
805 __ Ldrh(res, MemOperand(obj, offset));
806 break;
807 }
808 case Primitive::kPrimInt:
809 case Primitive::kPrimNot:
810 case Primitive::kPrimLong: { // TODO: support volatile.
811 DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
812 __ Ldr(res, MemOperand(obj, offset));
813 break;
814 }
815
816 case Primitive::kPrimFloat:
817 case Primitive::kPrimDouble:
818 LOG(FATAL) << "Unimplemented register res_type " << res_type;
819 break;
820
821 case Primitive::kPrimVoid:
822 LOG(FATAL) << "Unreachable res_type " << res_type;
823 }
824}
825
826void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
827 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
828 locations->SetInAt(0, Location::RequiresRegister());
829 locations->SetInAt(1, Location::RequiresRegister());
830}
831
832void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
833 Register obj = InputRegisterAt(instruction, 0);
834 Register value = InputRegisterAt(instruction, 1);
835 Primitive::Type field_type = instruction->InputAt(1)->GetType();
836 uint32_t offset = instruction->GetFieldOffset().Uint32Value();
837
838 switch (field_type) {
839 case Primitive::kPrimBoolean:
840 case Primitive::kPrimByte: {
841 __ Strb(value, MemOperand(obj, offset));
842 break;
843 }
844
845 case Primitive::kPrimShort:
846 case Primitive::kPrimChar: {
847 __ Strh(value, MemOperand(obj, offset));
848 break;
849 }
850
851 case Primitive::kPrimInt:
852 case Primitive::kPrimNot:
853 case Primitive::kPrimLong: {
854 DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
855 __ Str(value, MemOperand(obj, offset));
856
857 if (field_type == Primitive::kPrimNot) {
858 codegen_->MarkGCCard(obj, value);
859 }
860 break;
861 }
862
863 case Primitive::kPrimFloat:
864 case Primitive::kPrimDouble:
865 LOG(FATAL) << "Unimplemented register type " << field_type;
866 break;
867
868 case Primitive::kPrimVoid:
869 LOG(FATAL) << "Unreachable type " << field_type;
870 }
871}
872
873void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
874 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
875 locations->SetOut(Location::ConstantLocation(constant));
876}
877
878void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
879 // Will be generated at use site.
880}
881
882void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
883 HandleInvoke(invoke);
884}
885
886void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
887 HandleInvoke(invoke);
888}
889
890void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
891 LocationSummary* locations =
892 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
893 locations->AddTemp(LocationFrom(x0));
894
895 InvokeDexCallingConventionVisitor calling_convention_visitor;
896 for (size_t i = 0; i < invoke->InputCount(); i++) {
897 HInstruction* input = invoke->InputAt(i);
898 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
899 }
900
901 Primitive::Type return_type = invoke->GetType();
902 if (return_type != Primitive::kPrimVoid) {
903 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
904 }
905}
906
907void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
908 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
 909 // Make sure that ArtMethod* is passed in W0 as per the calling convention.
910 DCHECK(temp.Is(w0));
911 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
912 invoke->GetIndexInDexCache() * kHeapRefSize;
913
914 // TODO: Implement all kinds of calls:
915 // 1) boot -> boot
916 // 2) app -> boot
917 // 3) app -> app
918 //
919 // Currently we implement the app -> app logic, which looks up in the resolve cache.
920
921 // temp = method;
922 __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
923 // temp = temp->dex_cache_resolved_methods_;
924 __ Ldr(temp, MemOperand(temp.X(), mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
925 // temp = temp[index_in_cache];
926 __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
927 // lr = temp->entry_point_from_quick_compiled_code_;
928 __ Ldr(lr, MemOperand(temp.X(), mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
929 // lr();
930 __ Blr(lr);
931
932 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
933 DCHECK(!codegen_->IsLeafMethod());
934}
935
936void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
937 LocationSummary* locations = invoke->GetLocations();
938 Location receiver = locations->InAt(0);
939 Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
940 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
941 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
942 Offset class_offset = mirror::Object::ClassOffset();
943 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();
944
945 // temp = object->GetClass();
946 if (receiver.IsStackSlot()) {
947 __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
948 __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
949 } else {
950 DCHECK(receiver.IsRegister());
951 __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot,
952 class_offset));
953 }
954 // temp = temp->GetMethodAt(method_offset);
955 __ Ldr(temp.W(), MemOperand(temp, method_offset));
956 // lr = temp->GetEntryPoint();
957 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
958 // lr();
959 __ Blr(lr);
960 DCHECK(!codegen_->IsLeafMethod());
961 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
962}
963
964void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
965 load->SetLocations(nullptr);
966}
967
968void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
969 // Nothing to do, this is driven by the code generator.
970}
971
972void LocationsBuilderARM64::VisitLocal(HLocal* local) {
973 local->SetLocations(nullptr);
974}
975
976void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
977 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
978}
979
980void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
981 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
982 locations->SetOut(Location::ConstantLocation(constant));
983}
984
985void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
986 // Will be generated at use site.
987}
988
 989void LocationsBuilderARM64::VisitMul(HMul* mul) {
990 LocationSummary* locations =
991 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
992 switch (mul->GetResultType()) {
993 case Primitive::kPrimInt:
994 case Primitive::kPrimLong:
995 locations->SetInAt(0, Location::RequiresRegister());
996 locations->SetInAt(1, Location::RequiresRegister());
997 locations->SetOut(Location::RequiresRegister());
998 break;
999
1000 case Primitive::kPrimFloat:
1001 case Primitive::kPrimDouble:
1002 LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
1003 break;
1004
1005 default:
1006 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1007 }
1008}
1009
1010void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
1011 switch (mul->GetResultType()) {
1012 case Primitive::kPrimInt:
1013 case Primitive::kPrimLong:
1014 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
1015 break;
1016
1017 case Primitive::kPrimFloat:
1018 case Primitive::kPrimDouble:
1019 LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
1020 break;
1021
1022 default:
1023 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1024 }
1025}
1026
1027void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
1028 LocationSummary* locations =
1029 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1030 InvokeRuntimeCallingConvention calling_convention;
1031 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
1032 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
1033 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
1034}
1035
1036void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
1037 LocationSummary* locations = instruction->GetLocations();
1038 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
1039 DCHECK(type_index.Is(w0));
1040 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
1041 DCHECK(current_method.Is(w1));
1042 __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
1043 __ Mov(type_index, instruction->GetTypeIndex());
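  // Call the object-allocation entry point; it expects the type index in w0 and
  // the referring ArtMethod* in w1, both set up above.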
1044 __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocObjectWithAccessCheck).Int32Value()));
1045 __ Blr(lr);
1046 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
1047 DCHECK(!codegen_->IsLeafMethod());
1048}
1049
1050void LocationsBuilderARM64::VisitNot(HNot* instruction) {
1051 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1052 locations->SetInAt(0, Location::RequiresRegister());
1053 locations->SetOut(Location::RequiresRegister());
1054}
1055
1056void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
1057 switch (instruction->InputAt(0)->GetType()) {
1058 case Primitive::kPrimBoolean:
1059 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
1060 break;
1061
1062 case Primitive::kPrimInt:
1063 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
1064 break;
1065
1066 case Primitive::kPrimLong:
1067 LOG(FATAL) << "Not yet implemented type for not operation " << instruction->GetResultType();
1068 break;
1069
1070 default:
1071 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
1072 }
1073}
1074
1075void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
1076 LocationSummary* locations =
1077 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1078 locations->SetInAt(0, Location::RequiresRegister());
1079 if (instruction->HasUses()) {
1080 locations->SetOut(Location::SameAsFirstInput());
1081 }
1082}
1083
1084void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
1085 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
1086 codegen_->AddSlowPath(slow_path);
1087
1088 LocationSummary* locations = instruction->GetLocations();
1089 Location obj = locations->InAt(0);
1090 if (obj.IsRegister()) {
1091 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
1092 } else {
1093 DCHECK(obj.IsConstant()) << obj;
1094 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
1095 __ B(slow_path->GetEntryLabel());
1096 }
1097}
1098
1099void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
1100 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1101 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1102 if (location.IsStackSlot()) {
1103 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1104 } else if (location.IsDoubleStackSlot()) {
1105 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1106 }
1107 locations->SetOut(location);
1108}
1109
1110void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
1111 // Nothing to do, the parameter is already at its location.
1112}
1113
1114void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
1115 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1116 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1117 locations->SetInAt(i, Location::Any());
1118 }
1119 locations->SetOut(Location::Any());
1120}
1121
1122void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
1123 LOG(FATAL) << "Unreachable";
1124}
1125
1126void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
1127 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1128 Primitive::Type return_type = instruction->InputAt(0)->GetType();
1129
1130 if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
1131 LOG(FATAL) << "Unimplemented return type " << return_type;
1132 }
1133
1134 locations->SetInAt(0, LocationFrom(x0));
1135}
1136
1137void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
1138 if (kIsDebugBuild) {
1139 Primitive::Type type = instruction->InputAt(0)->GetType();
1140 switch (type) {
1141 case Primitive::kPrimBoolean:
1142 case Primitive::kPrimByte:
1143 case Primitive::kPrimChar:
1144 case Primitive::kPrimShort:
1145 case Primitive::kPrimInt:
1146 case Primitive::kPrimNot:
1147 DCHECK(InputRegisterAt(instruction, 0).Is(w0));
1148 break;
1149
1150 case Primitive::kPrimLong:
1151 DCHECK(InputRegisterAt(instruction, 0).Is(x0));
1152 break;
1153
1154 default:
1155 LOG(FATAL) << "Unimplemented return type " << type;
1156 }
1157 }
1158 codegen_->GenerateFrameExit();
1159 __ Br(lr);
1160}
1161
1162void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
1163 instruction->SetLocations(nullptr);
1164}
1165
1166void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
1167 codegen_->GenerateFrameExit();
1168 __ Br(lr);
1169}
1170
1171void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
1172 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
1173 Primitive::Type field_type = store->InputAt(1)->GetType();
1174 switch (field_type) {
1175 case Primitive::kPrimBoolean:
1176 case Primitive::kPrimByte:
1177 case Primitive::kPrimChar:
1178 case Primitive::kPrimShort:
1179 case Primitive::kPrimInt:
1180 case Primitive::kPrimNot:
1181 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1182 break;
1183
1184 case Primitive::kPrimLong:
1185 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1186 break;
1187
1188 default:
1189 LOG(FATAL) << "Unimplemented local type " << field_type;
1190 }
1191}
1192
1193void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
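  // Nothing to do: the locations builder constrained the value to the local's
  // stack slot, so the store happens when the input is moved into place.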
1194}
1195
1196void LocationsBuilderARM64::VisitSub(HSub* instruction) {
1197 HandleAddSub(instruction);
1198}
1199
1200void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
1201 HandleAddSub(instruction);
1202}
1203
1204void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1205 LocationSummary* locations =
1206 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1207 locations->SetInAt(0, Location::RequiresRegister());
1208 locations->SetInAt(1, Location::RequiresRegister());
1209 if (instruction->HasUses()) {
1210 locations->SetOut(Location::SameAsFirstInput());
1211 }
1212}
1213
1214void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1215 LocationSummary* locations = instruction->GetLocations();
1216 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1217 instruction, locations->InAt(0), locations->InAt(1));
1218 codegen_->AddSlowPath(slow_path);
1219
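  // A single unsigned comparison covers both `index < 0` and `index >= length`,
  // since a negative index is seen as a very large unsigned value.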
1220 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1221 __ B(slow_path->GetEntryLabel(), hs);
1222}
1223
1224void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
1225 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
1226}
1227
1228void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
1229 // TODO: Improve support for suspend checks.
1230 SuspendCheckSlowPathARM64* slow_path =
1231 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
1232 codegen_->AddSlowPath(slow_path);
1233
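  // Decrement the suspend-check counter and take the slow path (which calls
  // pTestSuspend) once it reaches zero.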
1234 __ Subs(wSuspend, wSuspend, 1);
1235 __ B(slow_path->GetEntryLabel(), le);
1236 __ Bind(slow_path->GetReturnLabel());
1237}
1238
1239void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
1240 temp->SetLocations(nullptr);
1241}
1242
1243void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
1244 // Nothing to do, this is driven by the code generator.
1245}
1246
1247} // namespace arm64
1248} // namespace art