/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

static bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

// TODO: clean-up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {
// Convenience helpers to ease conversion to and from VIXL operands.

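// ART register codes give SP the value 31 and XZR the value 32, so the two
// never collide. VIXL instead encodes the zero register as 31 and keeps a
// dedicated internal code for SP. The two helpers below translate between
// those numbering schemes.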
int VIXLRegCodeFromART(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
  return HeapOperand(RegisterFrom(location, type), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }
  return LocationFrom(x0);
}

#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
                              index_location_, Primitive::kPrimInt);
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
                              length_location_, Primitive::kPrimInt);
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
    __ Bind(GetEntryLabel());
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    __ B(GetReturnLabel());
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented type " << type;
  }

  if (gp_index_ < calling_convention.GetNumberOfRegisters()) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_));
    if (type == Primitive::kPrimLong) {
      // A long occupies a double stack slot, so reserve the extra slot.
      stack_index_++;
    }
  } else {  // Stack.
    if (type == Primitive::kPrimLong) {
      next_location = Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_));
      // A long occupies a double stack slot, so reserve the extra slot.
      stack_index_++;
    } else {
      next_location = Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_));
    }
  }
  // Move to the next register/stack slot.
  gp_index_++;
  stack_index_++;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFloatingPointRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->

void CodeGeneratorARM64::GenerateFrameEntry() {
  // TODO: Add proper support for the stack overflow check.
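  // Until then, probe below SP with a load: if fewer than
  // GetStackOverflowReservedBytes(kArm64) bytes of stack remain, the load
  // faults and the runtime turns the fault into a StackOverflowError.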
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register temp = temps.AcquireX();
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : context pointer.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveHelper(Location destination,
                                    Location source,
                                    Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    Register dst = RegisterFrom(destination, type);
    if (source.IsRegister()) {
      Register src = RegisterFrom(source, type);
      DCHECK(dst.IsSameSizeAndType(src));
      __ Mov(dst, src);
    } else {
      DCHECK(dst.Is64Bits() || !source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    }
  } else {
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
    } else {
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }

  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (type) {
      case Primitive::kPrimNot:
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        MoveHelper(location, Location::StackSlot(stack_slot), type);
        break;
      case Primitive::kPrimLong:
        MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
        break;
      default:
        LOG(FATAL) << "Unimplemented type " << type;
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveHelper(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();
  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << type;
      break;
    case Primitive::kPrimVoid:
    default:
      LOG(FATAL) << "Unexpected type " << type;
  }
  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register card = temps.AcquireX();
  Register temp = temps.AcquireX();
  vixl::Label done;
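  // Skip the write barrier for null stores, then mark the card covering
  // `object` as dirty: the card address is card_table_base +
  // (object >> kCardShift), and the byte stored is the low byte of the table
  // base itself, the usual ART trick to avoid materializing a separate
  // "dirty" constant.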
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   xSuspend (Suspend counter)
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  // TODO: See if we should instead allow allocating but preserve those if used.
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented support for floating-point";
  }

  ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfXRegisters);
  DCHECK_NE(reg, -1);
  blocked_core_registers_[reg] = true;

  if (IsFPType(type)) {
    return Location::FpuRegisterLocation(reg);
  } else {
    return Location::RegisterLocation(reg);
  }
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

#undef __
#define __ assembler_->vixl_masm_->

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(ArrayGet)                                 \
  M(ArraySet)                                 \
  M(DoubleConstant)                           \
  M(Div)                                      \
  M(FloatConstant)                            \
  M(Mul)                                      \
  M(Neg)                                      \
  M(NewArray)                                 \
  M(ParallelMove)

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE

void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());

  Primitive::Type type = instr->GetType();
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Operand rhs = InputOperandAt(instr, 1);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else {
        __ Sub(dst, lhs, rhs);
      }
      break;

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add/sub type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented add/sub type " << type;
  }
}

618
619void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
620 HandleAddSub(instruction);
621}
622
623void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
624 HandleAddSub(instruction);
625}
626
627void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
628 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
629 locations->SetInAt(0, Location::RequiresRegister());
630 locations->SetOut(Location::RequiresRegister());
631}
632
633void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
634 __ Ldr(OutputRegister(instruction),
635 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
636}
637
638void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
639 LocationSummary* locations =
640 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
641 locations->SetInAt(0, Location::RequiresRegister());
642 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
643 locations->SetOut(Location::RequiresRegister());
644}
645
646void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
647 Primitive::Type in_type = instruction->InputAt(0)->GetType();
648
649 DCHECK_EQ(in_type, Primitive::kPrimLong);
650 switch (in_type) {
651 case Primitive::kPrimLong: {
652 vixl::Label done;
653 Register result = OutputRegister(instruction);
654 Register left = InputRegisterAt(instruction, 0);
655 Operand right = InputOperandAt(instruction, 1);
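      // Compute the -1/0/1 result without a branch ladder: SUBS leaves 0 in
      // `result` when the operands are equal (branch straight to `done`);
      // otherwise seed `result` with 1 and let CNEG flip it to -1 when
      // left < right (`le` is safe here, the equal case already branched away).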
      __ Subs(result, left, right);
      __ B(eq, &done);
      __ Mov(result, 1);
      __ Cneg(result, result, le);
      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

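  // Materialize the boolean without branching: CSEL picks the zero register
  // when the inverted condition holds and 1 otherwise, so `res` becomes
  // (lhs cond rhs) ? 1 : 0.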
  __ Cmp(lhs, rhs);
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                  \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
    __ Brk(0);  // TODO: Introduce special markers for such code locations.
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  // TODO: Support for suspend checks emission.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);

  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition cond = ARM64Condition(condition->GetCondition());
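    // Peephole: an equality comparison against zero maps directly onto
    // CBZ/CBNZ, saving the separate CMP.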
    if ((cond == eq || cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(cond, true_target);
    }
  }

  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  Primitive::Type res_type = instruction->GetType();
  Register res = OutputRegister(instruction);
  Register obj = InputRegisterAt(instruction, 0);
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (res_type) {
    case Primitive::kPrimBoolean: {
      __ Ldrb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimByte: {
      __ Ldrsb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimShort: {
      __ Ldrsh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimChar: {
      __ Ldrh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {  // TODO: support volatile.
      DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
      __ Ldr(res, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register res_type " << res_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable res_type " << res_type;
  }
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Register obj = InputRegisterAt(instruction, 0);
  Register value = InputRegisterAt(instruction, 1);
  Primitive::Type field_type = instruction->InputAt(1)->GetType();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ Strb(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ Strh(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {
      DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
      __ Str(value, MemOperand(obj, offset));

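      // Reference stores must dirty the card for the holding object so the
      // GC knows to re-scan it.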
      if (field_type == Primitive::kPrimNot) {
        codegen_->MarkGCCard(obj, value);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(LocationFrom(x0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  Primitive::Type return_type = invoke->GetType();
  if (return_type != Primitive::kPrimVoid) {
    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
  }
}

void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0, as required by the calling convention.
  DCHECK(temp.Is(w0));
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp, MemOperand(temp.X(),
                          mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr, MemOperand(temp.X(),
                        mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
  __ Mov(type_index, instruction->GetTypeIndex());
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize,
                                                    pAllocObjectWithAccessCheck).Int32Value()));
  __ Blr(lr);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    case Primitive::kPrimLong:
      LOG(FATAL) << "Not yet implemented type for not operation " << instruction->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();

  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }

  locations->SetInAt(0, LocationFrom(x0));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  if (kIsDebugBuild) {
    Primitive::Type type = instruction->InputAt(0)->GetType();
    switch (type) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK(InputRegisterAt(instruction, 0).Is(w0));
        break;

      case Primitive::kPrimLong:
        DCHECK(InputRegisterAt(instruction, 0).Is(x0));
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << type;
    }
  }
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
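  // Nothing to do, this is driven by the code generator.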
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

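  // A single unsigned comparison covers both checks: a negative index wraps
  // to a large unsigned value, so `index hs length` also catches index < 0.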
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // TODO: Improve support for suspend checks.
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
  codegen_->AddSlowPath(slow_path);

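  // Decrement the dedicated suspend-counter register; once it reaches zero
  // (or below), take the slow path, which calls pTestSuspend and resumes at
  // the return label.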
  __ Subs(wSuspend, wSuspend, 1);
  __ B(slow_path->GetEntryLabel(), le);
  __ Bind(slow_path->GetReturnLabel());
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

}  // namespace arm64
}  // namespace art