/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

static bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

// TODO: Clean up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {
// Convenience helpers to ease conversion to and from VIXL operands.

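// ART gives SP and XZR distinct register codes (31 and 32, checked below),
// whereas VIXL encodes the zero register as kZeroRegCode and reaches SP via a
// separate internal code, so codes must be remapped in both directions.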
int VIXLRegCodeFromART(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  // A heap reference must be 32 bits wide, so it fits in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
  return HeapOperand(RegisterFrom(location, type), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }
  return LocationFrom(x0);
}

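// In the slow paths below, `__` forwards to the VIXL macro-assembler wrapped
// by the ART ARM64 assembler.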
#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                                    Location index_location,
                                    Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
                              index_location_, Primitive::kPrimInt);
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
                              length_location_, Primitive::kPrimInt);
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
    __ Bind(GetEntryLabel());
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    __ B(GetReturnLabel());
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented type " << type;
  }

  if (gp_index_ < calling_convention.GetNumberOfRegisters()) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_));
    if (type == Primitive::kPrimLong) {
      // Double stack slot reserved on the stack.
      stack_index_++;
    }
  } else {  // Stack.
    if (type == Primitive::kPrimLong) {
      next_location = Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_));
      // Double stack slot reserved on the stack.
      stack_index_++;
    } else {
      next_location = Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_));
    }
  }
  // Move to the next register/stack slot.
  gp_index_++;
  stack_index_++;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFloatingPointRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->

void CodeGeneratorARM64::GenerateFrameEntry() {
  // TODO: Add proper support for the stack overflow check.
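  // For now, probe the stack instead: load from the address
  // `GetStackOverflowReservedBytes(kArm64)` below sp, so that a thread about
  // to overflow faults here, at a PC the runtime can map back to this method.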
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register temp = temps.AcquireX();
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

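  // Allocate the whole frame with a single pre-indexed store that also leaves
  // the current method (an ArtMethod*, passed in w0 by the calling convention)
  // at sp[0], then spill the preserved registers at the top of the frame.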
  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveHelper(Location destination,
                                    Location source,
                                    Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    Register dst = RegisterFrom(destination, type);
    if (source.IsRegister()) {
      Register src = RegisterFrom(source, type);
      DCHECK(dst.IsSameSizeAndType(src));
      __ Mov(dst, src);
    } else {
      DCHECK(dst.Is64Bits() || !source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    }
  } else {
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
    } else {
      // Stack-to-stack move: go through a scratch register sized to the slot.
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }

  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (type) {
      case Primitive::kPrimNot:
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        MoveHelper(location, Location::StackSlot(stack_slot), type);
        break;
      case Primitive::kPrimLong:
        MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
        break;
      default:
        LOG(FATAL) << "Unimplemented type " << type;
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveHelper(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();
  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << type;
      break;
    case Primitive::kPrimVoid:
    default:
      LOG(FATAL) << "Unexpected type " << type;
  }
  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

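// Write barrier for reference stores: mark the card covering `object` as
// dirty, unless the stored `value` is null. The card lives at
// card_table_base + (object >> kCardShift); the byte stored is the low byte
// of the (biased) base address, which the runtime arranges to equal the
// dirty-card marker.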
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register card = temps.AcquireX();
  Register temp = temps.AcquireX();
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   xSuspend (Suspend counter)
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  // TODO: See if we should instead allow allocating but preserve those if used.
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented support for floating-point";
  }

  ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfXRegisters);
  DCHECK_NE(reg, -1);
  blocked_core_registers_[reg] = true;

  if (IsFPType(type)) {
    return Location::FpuRegisterLocation(reg);
  } else {
    return Location::RegisterLocation(reg);
  }
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

#undef __
#define __ assembler_->vixl_masm_->

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(ArrayGet)                                 \
  M(ArraySet)                                 \
  M(DoubleConstant)                           \
  M(FloatConstant)                            \
  M(Mul)                                      \
  M(Neg)                                      \
  M(NewArray)                                 \
  M(ParallelMove)

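// Each unimplemented instruction compiles to a `brk` whose immediate is a
// distinct enum value generated below, so a crash pinpoints exactly which
// visitor is missing.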
#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                                \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                                \
  }                                                                                    \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                            \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);  \
    locations->SetOut(Location::Any());                                                \
  }
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE

void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());

  Primitive::Type type = instr->GetType();
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Operand rhs = InputOperandAt(instr, 1);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else {
        __ Sub(dst, lhs, rhs);
      }
      break;

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add/sub type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented add/sub type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  DCHECK_EQ(in_type, Primitive::kPrimLong);
  switch (in_type) {
    case Primitive::kPrimLong: {
      vixl::Label done;
      Register result = OutputRegister(instruction);
      Register left = InputRegisterAt(instruction, 0);
      Operand right = InputOperandAt(instruction, 1);
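      // Produce -1, 0 or 1: `Subs` yields 0 directly on equality and sets the
      // flags; otherwise load 1 and conditionally negate it when the original
      // flags say left < right (le here, since eq has already branched away).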
      __ Subs(result, left, right);
      __ B(eq, &done);
      __ Mov(result, 1);
      __ Cneg(result, result, le);
      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

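  // Materialize the condition as 0 or 1: select the zero register when the
  // inverted condition holds (i.e. the condition is false), and 1 otherwise.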
  __ Cmp(lhs, rhs);
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                           \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }  \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
    __ Brk(0);  // TODO: Introduce special markers for such code locations.
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  // TODO: Support suspend check emission.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition cond = ARM64Condition(condition->GetCondition());
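    // For an (in)equality comparison against zero, use cbz/cbnz instead of an
    // explicit cmp followed by a conditional branch.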
    if ((cond == eq || cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(cond, true_target);
    }
  }

  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  Primitive::Type res_type = instruction->GetType();
  Register res = OutputRegister(instruction);
  Register obj = InputRegisterAt(instruction, 0);
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (res_type) {
    case Primitive::kPrimBoolean: {
      __ Ldrb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimByte: {
      __ Ldrsb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimShort: {
      __ Ldrsh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimChar: {
      __ Ldrh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {  // TODO: support volatile.
      DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
      __ Ldr(res, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register res_type " << res_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable res_type " << res_type;
  }
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Register obj = InputRegisterAt(instruction, 0);
  Register value = InputRegisterAt(instruction, 1);
  Primitive::Type field_type = instruction->InputAt(1)->GetType();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ Strb(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ Strh(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {
      DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
      __ Str(value, MemOperand(obj, offset));

      if (field_type == Primitive::kPrimNot) {
        codegen_->MarkGCCard(obj, value);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(LocationFrom(x0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  Primitive::Type return_type = invoke->GetType();
  if (return_type != Primitive::kPrimVoid) {
    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
  }
}

void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0 as per the calling convention.
  DCHECK(temp.Is(w0));
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  //   1) boot -> boot
  //   2) app -> boot
  //   3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up the method in
  // the resolved-methods cache.

  // temp = method;
  __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp, MemOperand(temp.X(),
                          mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr, MemOperand(temp.X(),
                        mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
  __ Mov(type_index, instruction->GetTypeIndex());
  int32_t entry_point_offset =
      QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocObjectWithAccessCheck).Int32Value();
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    case Primitive::kPrimLong:
      LOG(FATAL) << "Not yet implemented type for not operation " << instruction->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();

  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }

  locations->SetInAt(0, LocationFrom(x0));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  if (kIsDebugBuild) {
    Primitive::Type type = instruction->InputAt(0)->GetType();
    switch (type) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK(InputRegisterAt(instruction, 0).Is(w0));
        break;

      case Primitive::kPrimLong:
        DCHECK(InputRegisterAt(instruction, 0).Is(x0));
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << type;
    }
  }
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input is constrained to the local's stack slot above,
  // so the value is already in place.
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

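  // A single unsigned comparison suffices: a negative index wraps around to a
  // large unsigned value, so `hs` also catches index < 0.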
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // TODO: Improve support for suspend checks.
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
  codegen_->AddSlowPath(slow_path);

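  // Decrement the counter in the dedicated suspend register; when it reaches
  // zero or below, take the slow path, which calls pTestSuspend and then
  // resumes after the check.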
  __ Subs(wSuspend, wSuspend, 1);
  __ B(slow_path->GetEntryLabel(), le);
  __ Bind(slow_path->GetReturnLabel());
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

}  // namespace arm64
}  // namespace art