/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;

static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

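// The slow path classes below are out-of-line code sequences emitted after the main body of a
// method. Each one binds its entry label, marshals its operands into the runtime calling
// convention, calls the matching quick entrypoint via InvokeRuntime(), and, when it resumes
// normal execution, branches back to its exit label. Inside EmitNativeCode() the `__` macro
// above routes assembly through the shared CodeGeneratorARM64 VIXL assembler.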
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

#undef __

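// GetNextLocation() below assigns argument locations for the managed (dex) calling convention:
// floating-point arguments are placed in the next free FP argument register, all other arguments
// in the next free core register, and anything that no longer fits in registers goes to a stack
// slot. A stack slot index is consumed for every argument, which is why stack_index_ is always
// incremented.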
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Stack space is reserved for every argument, even those passed in registers.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#undef __
#define __ GetVIXLAssembler()->

423 // Ensure we emit the literal pool.
424 __ FinalizeCode();
425 CodeGenerator::Finalize(allocator);
426}
427
Zheng Xuad4450e2015-04-17 18:48:56 +0800428void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
429 // Note: There are 6 kinds of moves:
430 // 1. constant -> GPR/FPR (non-cycle)
431 // 2. constant -> stack (non-cycle)
432 // 3. GPR/FPR -> GPR/FPR
433 // 4. GPR/FPR -> stack
434 // 5. stack -> GPR/FPR
435 // 6. stack -> stack (non-cycle)
436 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
437 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
438 // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
439 // dependency.
440 vixl_temps_.Open(GetVIXLAssembler());
441}
442
443void ParallelMoveResolverARM64::FinishEmitNativeCode() {
444 vixl_temps_.Close();
445}
446
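// Scratch locations for the parallel move resolver are served from the VIXL temp pools opened in
// PrepareForEmitNativeCode(). Requests for stack-slot scratches are downgraded to a core register
// below, since a register temp is sufficient to break a stack-to-stack dependency.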
Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

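// Frame entry: when an overflow check is needed, a probe load from below the stack pointer
// faults if the frame would overflow the stack; then a single pre-indexed store allocates the
// whole frame and writes the current method at sp[0], followed by the callee-saved core and FP
// register spills described by the stack layout comment below.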
void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant()
      || instruction->IsLongConstant()
      || instruction->IsNullConstant()) {
    int64_t value = GetInt64ValueOf(instruction->AsConstant());
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK(((instruction->IsIntConstant() || instruction->IsNullConstant()) && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = (instruction->IsIntConstant() || instruction->IsNullConstant())
          ? temps.AcquireW()
          : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Primitive::Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

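// GC write barrier: when a non-null reference `value` is stored into `object`, the card covering
// `object` is marked dirty. The card address is computed as card_table_base + (object >>
// kCardShift), and the byte stored is the low byte of the card table base, which the runtime
// biases so that it equals the dirty-card value.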
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline) const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (is_baseline) {
    CPURegList reserved_core_baseline_registers = callee_saved_core_registers;
    while (!reserved_core_baseline_registers.IsEmpty()) {
      blocked_core_registers_[reserved_core_baseline_registers.PopLowestIndex().code()] = true;
    }

    CPURegList reserved_fp_baseline_registers = callee_saved_fp_registers;
    while (!reserved_fp_baseline_registers.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_baseline_registers.PopLowestIndex().code()] = true;
    }
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

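// LoadAcquire (and StoreRelease further below) implement volatile accesses with the ARMv8
// load-acquire / store-release instructions. Ldar/Stlr only accept a plain base register as the
// address, so the effective address is first materialized into a temp register. There is no FP
// form of these instructions, so float and double values are transferred through a core register
// with Fmov.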
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(RequiresCurrentMethod());
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

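// InvokeRuntime() loads a quick entrypoint pointer from the Thread register (tr) at the given
// offset and calls it with blr. The pc info recorded afterwards is what lets the runtime map the
// call's return address back to a dex pc and stack map.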
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc, slow_path);
    DCHECK(instruction->IsSuspendCheck()
           || instruction->IsBoundsCheck()
           || instruction->IsNullCheck()
           || instruction->IsDivZeroCheck()
           || !IsLeafMethod());
  }
}

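// Class initialization check: load the class status and branch to the slow path if the class is
// not yet at least kStatusInitialized. With acquire/release the status is read with Ldar;
// otherwise a plain Ldr is followed by a read barrier (Dmb with BarrierReads) so that later reads
// of the class's static fields cannot be reordered before the status check.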
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (use_acquire_release) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

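// Suspend check: load the 16-bit thread flags from the Thread register and enter the slow path
// when any flag (e.g. a suspend request) is set. When a successor block is given, the check
// branches to the successor on zero flags and otherwise jumps to the slow path, which returns to
// the successor's label.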
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

Alexandre Rames09a99962015-04-15 11:47:56 +01001113void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
1114 LocationSummary* locations =
1115 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1116 locations->SetInAt(0, Location::RequiresRegister());
1117 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1118 locations->SetOut(Location::RequiresFpuRegister());
1119 } else {
1120 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1121 }
1122}
1123
1124void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1125 const FieldInfo& field_info) {
1126 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001127 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001128
1129 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
1130 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1131
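  // Volatile loads either use a single load-acquire or a plain load followed
  // by a kAnyAny barrier, depending on which form the CPU prefers.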
1132 if (field_info.IsVolatile()) {
1133 if (use_acquire_release) {
1134 // NB: LoadAcquire will record the pc info if needed.
1135 codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
1136 } else {
1137 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1138 codegen_->MaybeRecordImplicitNullCheck(instruction);
1139 // For IRIW sequential consistency kLoadAny is not sufficient.
1140 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1141 }
1142 } else {
1143 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1144 codegen_->MaybeRecordImplicitNullCheck(instruction);
1145 }
1146}
1147
1148void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1149 LocationSummary* locations =
1150 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1151 locations->SetInAt(0, Location::RequiresRegister());
1152 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1153 locations->SetInAt(1, Location::RequiresFpuRegister());
1154 } else {
1155 locations->SetInAt(1, Location::RequiresRegister());
1156 }
1157}
1158
1159void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
1160 const FieldInfo& field_info) {
1161 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001162 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001163
1164 Register obj = InputRegisterAt(instruction, 0);
1165 CPURegister value = InputCPURegisterAt(instruction, 1);
1166 Offset offset = field_info.GetFieldOffset();
1167 Primitive::Type field_type = field_info.GetFieldType();
1168 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1169
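  // Volatile stores either use a single store-release or bracket a plain
  // store with kAnyStore and kAnyAny barriers.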
1170 if (field_info.IsVolatile()) {
1171 if (use_acquire_release) {
1172 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1173 codegen_->MaybeRecordImplicitNullCheck(instruction);
1174 } else {
1175 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1176 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1177 codegen_->MaybeRecordImplicitNullCheck(instruction);
1178 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1179 }
1180 } else {
1181 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1182 codegen_->MaybeRecordImplicitNullCheck(instruction);
1183 }
1184
1185 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
1186 codegen_->MarkGCCard(obj, Register(value));
1187 }
1188}
1189
Alexandre Rames67555f72014-11-18 10:55:16 +00001190void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001191 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001192
1193 switch (type) {
1194 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001195 case Primitive::kPrimLong: {
1196 Register dst = OutputRegister(instr);
1197 Register lhs = InputRegisterAt(instr, 0);
1198 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001199 if (instr->IsAdd()) {
1200 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001201 } else if (instr->IsAnd()) {
1202 __ And(dst, lhs, rhs);
1203 } else if (instr->IsOr()) {
1204 __ Orr(dst, lhs, rhs);
1205 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001206 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001207 } else {
1208 DCHECK(instr->IsXor());
1209 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001210 }
1211 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001212 }
1213 case Primitive::kPrimFloat:
1214 case Primitive::kPrimDouble: {
1215 FPRegister dst = OutputFPRegister(instr);
1216 FPRegister lhs = InputFPRegisterAt(instr, 0);
1217 FPRegister rhs = InputFPRegisterAt(instr, 1);
1218 if (instr->IsAdd()) {
1219 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001220 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001221 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001222 } else {
1223 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001224 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001225 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001226 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001227 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001228 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001229 }
1230}
1231
Serban Constantinescu02164b32014-11-13 14:05:07 +00001232void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1233 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1234
1235 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1236 Primitive::Type type = instr->GetResultType();
1237 switch (type) {
1238 case Primitive::kPrimInt:
1239 case Primitive::kPrimLong: {
1240 locations->SetInAt(0, Location::RequiresRegister());
1241 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1242 locations->SetOut(Location::RequiresRegister());
1243 break;
1244 }
1245 default:
1246 LOG(FATAL) << "Unexpected shift type " << type;
1247 }
1248}
1249
1250void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1251 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1252
1253 Primitive::Type type = instr->GetType();
1254 switch (type) {
1255 case Primitive::kPrimInt:
1256 case Primitive::kPrimLong: {
1257 Register dst = OutputRegister(instr);
1258 Register lhs = InputRegisterAt(instr, 0);
1259 Operand rhs = InputOperandAt(instr, 1);
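      // Constant shift distances are masked so that only the low bits are
      // used, matching Java shift semantics.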
1260 if (rhs.IsImmediate()) {
1261 uint32_t shift_value = (type == Primitive::kPrimInt)
1262 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1263 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1264 if (instr->IsShl()) {
1265 __ Lsl(dst, lhs, shift_value);
1266 } else if (instr->IsShr()) {
1267 __ Asr(dst, lhs, shift_value);
1268 } else {
1269 __ Lsr(dst, lhs, shift_value);
1270 }
1271 } else {
1272 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1273
1274 if (instr->IsShl()) {
1275 __ Lsl(dst, lhs, rhs_reg);
1276 } else if (instr->IsShr()) {
1277 __ Asr(dst, lhs, rhs_reg);
1278 } else {
1279 __ Lsr(dst, lhs, rhs_reg);
1280 }
1281 }
1282 break;
1283 }
1284 default:
1285 LOG(FATAL) << "Unexpected shift operation type " << type;
1286 }
1287}
1288
Alexandre Rames5319def2014-10-23 10:03:10 +01001289void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001290 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001291}
1292
1293void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001294 HandleBinaryOp(instruction);
1295}
1296
1297void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1298 HandleBinaryOp(instruction);
1299}
1300
1301void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1302 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001303}
1304
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001305void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1306 LocationSummary* locations =
1307 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1308 locations->SetInAt(0, Location::RequiresRegister());
1309 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001310 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1311 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1312 } else {
1313 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1314 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001315}
1316
1317void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1318 LocationSummary* locations = instruction->GetLocations();
1319 Primitive::Type type = instruction->GetType();
1320 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001321 Location index = locations->InAt(1);
1322 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001323 MemOperand source = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001324 MacroAssembler* masm = GetVIXLAssembler();
1325 UseScratchRegisterScope temps(masm);
1326 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001327
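  // With a constant index the element offset is folded into the addressing
  // mode; otherwise the scaled index is first added to the base in a scratch
  // register.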
1328 if (index.IsConstant()) {
1329 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001330 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001331 } else {
1332 Register temp = temps.AcquireSameSizeAs(obj);
1333 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1334 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001335 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001336 }
1337
Alexandre Rames67555f72014-11-18 10:55:16 +00001338 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001339 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001340}
1341
Alexandre Rames5319def2014-10-23 10:03:10 +01001342void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1343 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1344 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001345 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001346}
1347
1348void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001349 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001350 __ Ldr(OutputRegister(instruction),
1351 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001352 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001353}
1354
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001355void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Alexandre Rames97833a02015-04-16 15:07:12 +01001356 if (instruction->NeedsTypeCheck()) {
1357 LocationSummary* locations =
1358 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001359 InvokeRuntimeCallingConvention calling_convention;
1360 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1361 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1362 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1363 } else {
Alexandre Rames97833a02015-04-16 15:07:12 +01001364 LocationSummary* locations =
1365 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001366 locations->SetInAt(0, Location::RequiresRegister());
1367 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001368 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1369 locations->SetInAt(2, Location::RequiresFpuRegister());
1370 } else {
1371 locations->SetInAt(2, Location::RequiresRegister());
1372 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001373 }
1374}
1375
1376void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1377 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01001378 LocationSummary* locations = instruction->GetLocations();
1379 bool needs_runtime_call = locations->WillCall();
1380
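  // Stores that need a type check are routed to the pAputObject runtime
  // entry point; every other store is generated inline.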
1381 if (needs_runtime_call) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001382 codegen_->InvokeRuntime(
1383 QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001384 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001385 } else {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001386 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001387 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001388 Location index = locations->InAt(1);
1389 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001390 MemOperand destination = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001391 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001392 BlockPoolsScope block_pools(masm);
Alexandre Rames97833a02015-04-16 15:07:12 +01001393 {
1394 // We use a block to end the scratch scope before the write barrier, thus
1395 // freeing the temporary registers so they can be used in `MarkGCCard`.
1396 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001397
Alexandre Rames97833a02015-04-16 15:07:12 +01001398 if (index.IsConstant()) {
1399 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
1400 destination = HeapOperand(obj, offset);
1401 } else {
1402 Register temp = temps.AcquireSameSizeAs(obj);
1403 Register index_reg = InputRegisterAt(instruction, 1);
1404 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
1405 destination = HeapOperand(temp, offset);
1406 }
1407
1408 codegen_->Store(value_type, value, destination);
1409 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001410 }
Alexandre Rames97833a02015-04-16 15:07:12 +01001411 if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
1412 codegen_->MarkGCCard(obj, value.W());
1413 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001414 }
1415}
1416
Alexandre Rames67555f72014-11-18 10:55:16 +00001417void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1418 LocationSummary* locations =
1419 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1420 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00001421 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00001422 if (instruction->HasUses()) {
1423 locations->SetOut(Location::SameAsFirstInput());
1424 }
1425}
1426
1427void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001428 LocationSummary* locations = instruction->GetLocations();
1429 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1430 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001431 codegen_->AddSlowPath(slow_path);
1432
1433 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1434 __ B(slow_path->GetEntryLabel(), hs);
1435}
1436
1437void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1438 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1439 instruction, LocationSummary::kCallOnSlowPath);
1440 locations->SetInAt(0, Location::RequiresRegister());
1441 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001442 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001443}
1444
1445void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001446 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001447 Register obj = InputRegisterAt(instruction, 0);
 1448 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001449 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001450
Alexandre Rames3e69f162014-12-10 10:36:50 +00001451 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1452 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001453 codegen_->AddSlowPath(slow_path);
1454
1455 // TODO: avoid this check if we know obj is not null.
1456 __ Cbz(obj, slow_path->GetExitLabel());
1457 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001458 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1459 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001460 __ B(ne, slow_path->GetEntryLabel());
1461 __ Bind(slow_path->GetExitLabel());
1462}
1463
1464void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1465 LocationSummary* locations =
1466 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1467 locations->SetInAt(0, Location::RequiresRegister());
1468 if (check->HasUses()) {
1469 locations->SetOut(Location::SameAsFirstInput());
1470 }
1471}
1472
1473void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1474 // We assume the class is not null.
1475 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1476 check->GetLoadClass(), check, check->GetDexPc(), true);
1477 codegen_->AddSlowPath(slow_path);
1478 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1479}
1480
Serban Constantinescu02164b32014-11-13 14:05:07 +00001481void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001482 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001483 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1484 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001485 switch (in_type) {
1486 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001487 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001488 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001489 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1490 break;
1491 }
1492 case Primitive::kPrimFloat:
1493 case Primitive::kPrimDouble: {
1494 locations->SetInAt(0, Location::RequiresFpuRegister());
Alexandre Rames93415462015-02-17 15:08:20 +00001495 HInstruction* right = compare->InputAt(1);
1496 if ((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1497 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0))) {
1498 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1499 } else {
1500 locations->SetInAt(1, Location::RequiresFpuRegister());
1501 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001502 locations->SetOut(Location::RequiresRegister());
1503 break;
1504 }
1505 default:
1506 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1507 }
1508}
1509
1510void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1511 Primitive::Type in_type = compare->InputAt(0)->GetType();
1512
1513 // 0 if: left == right
1514 // 1 if: left > right
1515 // -1 if: left < right
1516 switch (in_type) {
1517 case Primitive::kPrimLong: {
1518 Register result = OutputRegister(compare);
1519 Register left = InputRegisterAt(compare, 0);
1520 Operand right = InputOperandAt(compare, 1);
1521
1522 __ Cmp(left, right);
1523 __ Cset(result, ne);
1524 __ Cneg(result, result, lt);
1525 break;
1526 }
1527 case Primitive::kPrimFloat:
1528 case Primitive::kPrimDouble: {
1529 Register result = OutputRegister(compare);
1530 FPRegister left = InputFPRegisterAt(compare, 0);
Alexandre Rames93415462015-02-17 15:08:20 +00001531 if (compare->GetLocations()->InAt(1).IsConstant()) {
1532 if (kIsDebugBuild) {
1533 HInstruction* right = compare->GetLocations()->InAt(1).GetConstant();
1534 DCHECK((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1535 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0)));
1536 }
 1537 // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
1538 __ Fcmp(left, 0.0);
1539 } else {
1540 __ Fcmp(left, InputFPRegisterAt(compare, 1));
1541 }
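      // The gt/lt bias only matters for unordered (NaN) comparisons, which
      // must produce 1 or -1 respectively.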
Serban Constantinescu02164b32014-11-13 14:05:07 +00001542 if (compare->IsGtBias()) {
1543 __ Cset(result, ne);
1544 } else {
1545 __ Csetm(result, ne);
1546 }
1547 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001548 break;
1549 }
1550 default:
1551 LOG(FATAL) << "Unimplemented compare type " << in_type;
1552 }
1553}
1554
1555void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1556 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1557 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001558 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames5319def2014-10-23 10:03:10 +01001559 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001560 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001561 }
1562}
1563
1564void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1565 if (!instruction->NeedsMaterialization()) {
1566 return;
1567 }
1568
1569 LocationSummary* locations = instruction->GetLocations();
1570 Register lhs = InputRegisterAt(instruction, 0);
1571 Operand rhs = InputOperandAt(instruction, 1);
1572 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1573 Condition cond = ARM64Condition(instruction->GetCondition());
1574
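  // Materialize the condition: compare the inputs and set the result to 1 or
  // 0 with Cset.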
1575 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001576 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001577}
1578
1579#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1580 M(Equal) \
1581 M(NotEqual) \
1582 M(LessThan) \
1583 M(LessThanOrEqual) \
1584 M(GreaterThan) \
1585 M(GreaterThanOrEqual)
1586#define DEFINE_CONDITION_VISITORS(Name) \
1587void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1588void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1589FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001590#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001591#undef FOR_EACH_CONDITION_INSTRUCTION
1592
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001593void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1594 LocationSummary* locations =
1595 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1596 switch (div->GetResultType()) {
1597 case Primitive::kPrimInt:
1598 case Primitive::kPrimLong:
1599 locations->SetInAt(0, Location::RequiresRegister());
1600 locations->SetInAt(1, Location::RequiresRegister());
1601 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1602 break;
1603
1604 case Primitive::kPrimFloat:
1605 case Primitive::kPrimDouble:
1606 locations->SetInAt(0, Location::RequiresFpuRegister());
1607 locations->SetInAt(1, Location::RequiresFpuRegister());
1608 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1609 break;
1610
1611 default:
1612 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1613 }
1614}
1615
1616void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1617 Primitive::Type type = div->GetResultType();
1618 switch (type) {
1619 case Primitive::kPrimInt:
1620 case Primitive::kPrimLong:
1621 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1622 break;
1623
1624 case Primitive::kPrimFloat:
1625 case Primitive::kPrimDouble:
1626 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1627 break;
1628
1629 default:
1630 LOG(FATAL) << "Unexpected div type " << type;
1631 }
1632}
1633
Alexandre Rames67555f72014-11-18 10:55:16 +00001634void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1635 LocationSummary* locations =
1636 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1637 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1638 if (instruction->HasUses()) {
1639 locations->SetOut(Location::SameAsFirstInput());
1640 }
1641}
1642
1643void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1644 SlowPathCodeARM64* slow_path =
1645 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1646 codegen_->AddSlowPath(slow_path);
1647 Location value = instruction->GetLocations()->InAt(0);
1648
Alexandre Rames3e69f162014-12-10 10:36:50 +00001649 Primitive::Type type = instruction->GetType();
1650
1651 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
 1652 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1653 return;
1654 }
1655
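  // A constant divisor is handled at compile time: zero branches straight to
  // the slow path, anything else needs no check at all.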
Alexandre Rames67555f72014-11-18 10:55:16 +00001656 if (value.IsConstant()) {
1657 int64_t divisor = Int64ConstantFrom(value);
1658 if (divisor == 0) {
1659 __ B(slow_path->GetEntryLabel());
1660 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001661 // A division by a non-zero constant is valid. We don't need to perform
1662 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001663 }
1664 } else {
1665 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1666 }
1667}
1668
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001669void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1670 LocationSummary* locations =
1671 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1672 locations->SetOut(Location::ConstantLocation(constant));
1673}
1674
1675void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1676 UNUSED(constant);
1677 // Will be generated at use site.
1678}
1679
Alexandre Rames5319def2014-10-23 10:03:10 +01001680void LocationsBuilderARM64::VisitExit(HExit* exit) {
1681 exit->SetLocations(nullptr);
1682}
1683
1684void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001685 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001686}
1687
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001688void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1689 LocationSummary* locations =
1690 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1691 locations->SetOut(Location::ConstantLocation(constant));
1692}
1693
1694void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1695 UNUSED(constant);
1696 // Will be generated at use site.
1697}
1698
Alexandre Rames5319def2014-10-23 10:03:10 +01001699void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1700 got->SetLocations(nullptr);
1701}
1702
1703void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1704 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001705 DCHECK(!successor->IsExitBlock());
1706 HBasicBlock* block = got->GetBlock();
1707 HInstruction* previous = got->GetPrevious();
1708 HLoopInformation* info = block->GetLoopInformation();
1709
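  // A goto forming a loop back edge emits the loop's suspend check here and
  // branches to the successor when no suspension is requested.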
David Brazdil46e2a392015-03-16 17:31:52 +00001710 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001711 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1712 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1713 return;
1714 }
1715 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1716 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1717 }
1718 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001719 __ B(codegen_->GetLabelOf(successor));
1720 }
1721}
1722
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001723void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
1724 vixl::Label* true_target,
1725 vixl::Label* false_target,
1726 vixl::Label* always_true_target) {
1727 HInstruction* cond = instruction->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001728 HCondition* condition = cond->AsCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01001729
Serban Constantinescu02164b32014-11-13 14:05:07 +00001730 if (cond->IsIntConstant()) {
1731 int32_t cond_value = cond->AsIntConstant()->GetValue();
1732 if (cond_value == 1) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001733 if (always_true_target != nullptr) {
1734 __ B(always_true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001735 }
1736 return;
1737 } else {
1738 DCHECK_EQ(cond_value, 0);
1739 }
1740 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001741 // The condition instruction has been materialized; compare its output to 0.
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001742 Location cond_val = instruction->GetLocations()->InAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001743 DCHECK(cond_val.IsRegister());
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001744 __ Cbnz(InputRegisterAt(instruction, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001745 } else {
 1746 // The condition instruction has not been materialized; use its inputs as
1747 // the comparison and its condition as the branch condition.
1748 Register lhs = InputRegisterAt(condition, 0);
1749 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001750 Condition arm64_cond = ARM64Condition(condition->GetCondition());
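    // Comparisons against zero are folded into a single Cbz/Cbnz/Tbz/Tbnz
    // where possible; gt and le still need a full Cmp since they depend on
    // more than the sign bit.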
Alexandre Rames4388dcc2015-02-03 10:28:33 +00001751 if ((arm64_cond != gt && arm64_cond != le) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1752 switch (arm64_cond) {
1753 case eq:
1754 __ Cbz(lhs, true_target);
1755 break;
1756 case ne:
1757 __ Cbnz(lhs, true_target);
1758 break;
1759 case lt:
1760 // Test the sign bit and branch accordingly.
1761 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1762 break;
1763 case ge:
1764 // Test the sign bit and branch accordingly.
1765 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1766 break;
1767 default:
 1768 // Without the `static_cast` the compiler raises an error under
 1769 // `-Werror=sign-promo`.
1770 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001771 }
1772 } else {
1773 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001774 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001775 }
1776 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001777 if (false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001778 __ B(false_target);
1779 }
1780}
1781
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001782void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1783 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1784 HInstruction* cond = if_instr->InputAt(0);
1785 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
1786 locations->SetInAt(0, Location::RequiresRegister());
1787 }
1788}
1789
1790void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1791 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1792 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1793 vixl::Label* always_true_target = true_target;
1794 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1795 if_instr->IfTrueSuccessor())) {
1796 always_true_target = nullptr;
1797 }
1798 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1799 if_instr->IfFalseSuccessor())) {
1800 false_target = nullptr;
1801 }
1802 GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1803}
1804
1805void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1806 LocationSummary* locations = new (GetGraph()->GetArena())
1807 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
1808 HInstruction* cond = deoptimize->InputAt(0);
1809 DCHECK(cond->IsCondition());
1810 if (cond->AsCondition()->NeedsMaterialization()) {
1811 locations->SetInAt(0, Location::RequiresRegister());
1812 }
1813}
1814
1815void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1816 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
1817 DeoptimizationSlowPathARM64(deoptimize);
1818 codegen_->AddSlowPath(slow_path);
1819 vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
1820 GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1821}
1822
Alexandre Rames5319def2014-10-23 10:03:10 +01001823void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001824 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001825}
1826
1827void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001828 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001829}
1830
1831void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001832 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001833}
1834
1835void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001836 HandleFieldSet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001837}
1838
Alexandre Rames67555f72014-11-18 10:55:16 +00001839void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1840 LocationSummary::CallKind call_kind =
1841 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1842 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1843 locations->SetInAt(0, Location::RequiresRegister());
1844 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001845 // The output does overlap inputs.
1846 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00001847}
1848
1849void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1850 LocationSummary* locations = instruction->GetLocations();
 1851 Register obj = InputRegisterAt(instruction, 0);
 1852 Register cls = InputRegisterAt(instruction, 1);
1853 Register out = OutputRegister(instruction);
1854
1855 vixl::Label done;
1856
1857 // Return 0 if `obj` is null.
1858 // TODO: Avoid this check if we know `obj` is not null.
1859 __ Mov(out, 0);
1860 __ Cbz(obj, &done);
1861
1862 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001863 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001864 __ Cmp(out, cls);
1865 if (instruction->IsClassFinal()) {
1866 // Classes must be equal for the instanceof to succeed.
1867 __ Cset(out, eq);
1868 } else {
1869 // If the classes are not equal, we go into a slow path.
1870 DCHECK(locations->OnlyCallsOnSlowPath());
1871 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001872 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1873 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001874 codegen_->AddSlowPath(slow_path);
1875 __ B(ne, slow_path->GetEntryLabel());
1876 __ Mov(out, 1);
1877 __ Bind(slow_path->GetExitLabel());
1878 }
1879
1880 __ Bind(&done);
1881}
1882
Alexandre Rames5319def2014-10-23 10:03:10 +01001883void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1884 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1885 locations->SetOut(Location::ConstantLocation(constant));
1886}
1887
1888void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1889 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001890 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001891}
1892
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001893void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
1894 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1895 locations->SetOut(Location::ConstantLocation(constant));
1896}
1897
1898void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
1899 // Will be generated at use site.
1900 UNUSED(constant);
1901}
1902
Alexandre Rames5319def2014-10-23 10:03:10 +01001903void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1904 LocationSummary* locations =
1905 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1906 locations->AddTemp(LocationFrom(x0));
1907
1908 InvokeDexCallingConventionVisitor calling_convention_visitor;
1909 for (size_t i = 0; i < invoke->InputCount(); i++) {
1910 HInstruction* input = invoke->InputAt(i);
1911 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1912 }
1913
1914 Primitive::Type return_type = invoke->GetType();
1915 if (return_type != Primitive::kPrimVoid) {
1916 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1917 }
1918}
1919
Alexandre Rames67555f72014-11-18 10:55:16 +00001920void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1921 HandleInvoke(invoke);
1922}
1923
1924void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1925 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1926 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1927 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1928 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1929 Location receiver = invoke->GetLocations()->InAt(0);
1930 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001931 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001932
1933 // The register ip1 is required to be used for the hidden argument in
1934 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01001935 MacroAssembler* masm = GetVIXLAssembler();
1936 UseScratchRegisterScope scratch_scope(masm);
1937 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00001938 scratch_scope.Exclude(ip1);
1939 __ Mov(ip1, invoke->GetDexMethodIndex());
1940
1941 // temp = object->GetClass();
1942 if (receiver.IsStackSlot()) {
1943 __ Ldr(temp, StackOperandFrom(receiver));
1944 __ Ldr(temp, HeapOperand(temp, class_offset));
1945 } else {
1946 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1947 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001948 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00001949 // temp = temp->GetImtEntryAt(method_offset);
1950 __ Ldr(temp, HeapOperand(temp, method_offset));
1951 // lr = temp->GetEntryPoint();
1952 __ Ldr(lr, HeapOperand(temp, entry_point));
1953 // lr();
1954 __ Blr(lr);
1955 DCHECK(!codegen_->IsLeafMethod());
1956 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1957}
1958
1959void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001960 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1961 if (intrinsic.TryDispatch(invoke)) {
1962 return;
1963 }
1964
Alexandre Rames67555f72014-11-18 10:55:16 +00001965 HandleInvoke(invoke);
1966}
1967
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001968void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain4c0eb422015-04-24 16:43:49 +01001969 // Explicit clinit checks triggered by static invokes must have been
1970 // pruned by art::PrepareForRegisterAllocation.
1971 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
1972
Andreas Gampe878d58c2015-01-15 23:24:00 -08001973 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1974 if (intrinsic.TryDispatch(invoke)) {
1975 return;
1976 }
1977
Alexandre Rames67555f72014-11-18 10:55:16 +00001978 HandleInvoke(invoke);
1979}
1980
Andreas Gampe878d58c2015-01-15 23:24:00 -08001981static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
1982 if (invoke->GetLocations()->Intrinsified()) {
1983 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
1984 intrinsic.Dispatch(invoke);
1985 return true;
1986 }
1987 return false;
1988}
1989
1990void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
1991 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
1992 DCHECK(temp.Is(kArtMethodRegister));
Alexandre Rames5319def2014-10-23 10:03:10 +01001993 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08001994 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01001995
1996 // TODO: Implement all kinds of calls:
1997 // 1) boot -> boot
1998 // 2) app -> boot
1999 // 3) app -> app
2000 //
2001 // Currently we implement the app -> app logic, which looks up in the resolve cache.
2002
Nicolas Geoffray0a299b92015-01-29 11:39:44 +00002003 // temp = method;
2004 LoadCurrentMethod(temp);
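  // Recursive calls branch straight back to the frame entry; all other calls
  // go through the dex cache resolved methods array.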
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002005 if (!invoke->IsRecursive()) {
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002006 // temp = temp->dex_cache_resolved_methods_;
2007 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
2008 // temp = temp[index_in_cache];
2009 __ Ldr(temp, HeapOperand(temp, index_in_cache));
2010 // lr = temp->entry_point_from_quick_compiled_code_;
2011 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2012 kArm64WordSize)));
2013 // lr();
2014 __ Blr(lr);
2015 } else {
2016 __ Bl(&frame_entry_label_);
2017 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002018
Andreas Gampe878d58c2015-01-15 23:24:00 -08002019 DCHECK(!IsLeafMethod());
2020}
2021
2022void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain4c0eb422015-04-24 16:43:49 +01002023 // Explicit clinit checks triggered by static invokes must have been
2024 // pruned by art::PrepareForRegisterAllocation.
2025 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
2026
Andreas Gampe878d58c2015-01-15 23:24:00 -08002027 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2028 return;
2029 }
2030
Alexandre Ramesd921d642015-04-16 15:07:16 +01002031 BlockPoolsScope block_pools(GetVIXLAssembler());
Andreas Gampe878d58c2015-01-15 23:24:00 -08002032 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2033 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002034 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002035}
2036
2037void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002038 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2039 return;
2040 }
2041
Alexandre Rames5319def2014-10-23 10:03:10 +01002042 LocationSummary* locations = invoke->GetLocations();
2043 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002044 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002045 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2046 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2047 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002048 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002049
Alexandre Ramesd921d642015-04-16 15:07:16 +01002050 BlockPoolsScope block_pools(GetVIXLAssembler());
2051
Alexandre Rames5319def2014-10-23 10:03:10 +01002052 // temp = object->GetClass();
2053 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002054 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2055 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002056 } else {
2057 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002058 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002059 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002060 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01002061 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002062 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002063 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002064 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002065 // lr();
2066 __ Blr(lr);
2067 DCHECK(!codegen_->IsLeafMethod());
2068 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2069}
2070
Alexandre Rames67555f72014-11-18 10:55:16 +00002071void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2072 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2073 : LocationSummary::kNoCall;
2074 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2075 locations->SetOut(Location::RequiresRegister());
2076}
2077
2078void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2079 Register out = OutputRegister(cls);
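  // The referrer's own class is read directly from the ArtMethod; any other
  // class is read from the dex cache and may need the slow path for
  // resolution or initialization.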
2080 if (cls->IsReferrersClass()) {
2081 DCHECK(!cls->CanCallRuntime());
2082 DCHECK(!cls->MustGenerateClinitCheck());
2083 codegen_->LoadCurrentMethod(out);
2084 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2085 } else {
2086 DCHECK(cls->CanCallRuntime());
2087 codegen_->LoadCurrentMethod(out);
2088 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002089 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002090
2091 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2092 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2093 codegen_->AddSlowPath(slow_path);
2094 __ Cbz(out, slow_path->GetEntryLabel());
2095 if (cls->MustGenerateClinitCheck()) {
2096 GenerateClassInitializationCheck(slow_path, out);
2097 } else {
2098 __ Bind(slow_path->GetExitLabel());
2099 }
2100 }
2101}
2102
2103void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2104 LocationSummary* locations =
2105 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2106 locations->SetOut(Location::RequiresRegister());
2107}
2108
2109void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2110 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2111 __ Ldr(OutputRegister(instruction), exception);
2112 __ Str(wzr, exception);
2113}
2114
Alexandre Rames5319def2014-10-23 10:03:10 +01002115void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2116 load->SetLocations(nullptr);
2117}
2118
2119void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
 2120 // Nothing to do; this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002121 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002122}
2123
Alexandre Rames67555f72014-11-18 10:55:16 +00002124void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2125 LocationSummary* locations =
2126 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2127 locations->SetOut(Location::RequiresRegister());
2128}
2129
2130void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2131 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2132 codegen_->AddSlowPath(slow_path);
2133
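  // Look the string up through the declaring class' dex cache; a null entry
  // means it is not resolved yet and the slow path is taken.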
2134 Register out = OutputRegister(load);
2135 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002136 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2137 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002138 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002139 __ Cbz(out, slow_path->GetEntryLabel());
2140 __ Bind(slow_path->GetExitLabel());
2141}
2142
Alexandre Rames5319def2014-10-23 10:03:10 +01002143void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2144 local->SetLocations(nullptr);
2145}
2146
2147void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2148 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2149}
2150
2151void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2152 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2153 locations->SetOut(Location::ConstantLocation(constant));
2154}
2155
2156void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2157 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002158 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002159}
2160
Alexandre Rames67555f72014-11-18 10:55:16 +00002161void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2162 LocationSummary* locations =
2163 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2164 InvokeRuntimeCallingConvention calling_convention;
2165 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2166}
2167
2168void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2169 codegen_->InvokeRuntime(instruction->IsEnter()
2170 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2171 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002172 instruction->GetDexPc(),
2173 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002174 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002175}
2176
Alexandre Rames42d641b2014-10-27 14:00:51 +00002177void LocationsBuilderARM64::VisitMul(HMul* mul) {
2178 LocationSummary* locations =
2179 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2180 switch (mul->GetResultType()) {
2181 case Primitive::kPrimInt:
2182 case Primitive::kPrimLong:
2183 locations->SetInAt(0, Location::RequiresRegister());
2184 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002185 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002186 break;
2187
2188 case Primitive::kPrimFloat:
2189 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002190 locations->SetInAt(0, Location::RequiresFpuRegister());
2191 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002192 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002193 break;
2194
2195 default:
2196 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2197 }
2198}
2199
2200void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2201 switch (mul->GetResultType()) {
2202 case Primitive::kPrimInt:
2203 case Primitive::kPrimLong:
2204 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2205 break;
2206
2207 case Primitive::kPrimFloat:
2208 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002209 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002210 break;
2211
2212 default:
2213 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2214 }
2215}
2216
void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

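// Because the integral input may be an encodable constant (see the locations
// above), the Neg below takes an Operand rather than a plain register. A rough
// sketch of the expected output, assuming the input is in w1/s1:
//   neg  w0, w1          // integral case
//   fneg s0, s1          // floating-point case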
void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}

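// Array allocation is a runtime call: the type index is materialized in w0, the
// current ArtMethod* is loaded into w2, and the array length arrives in w1 via
// the invoke calling convention. Loosely, the call amounts to something like
//   x0 = AllocArrayWithAccessCheck(type_idx, length, current_method)
// where the concrete entrypoint comes from instruction->GetEntrypoint().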
void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w2));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

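// Bitwise not maps directly onto MVN. A rough sketch for an int input in w1
// (the long case uses the x-form registers instead):
//   mvn w0, w1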
void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

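// Boolean negation relies on the input being a canonical 0/1 value, so a single
// exclusive-or with 1 flips it: 0 ^ 1 == 1 and 1 ^ 1 == 0. Sketch:
//   eor w0, w1, #0x1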
void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

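// Implicit null checks piggy-back on the hardware: loading from offset 0 of a
// null reference faults, and the runtime's fault handler is expected to turn
// that fault into a NullPointerException. The load below targets wzr, so it has
// no register side effects; it exists only to trigger the fault and to record a
// PC-to-dex mapping for the exception.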
void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}

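// The explicit variant compares against null in code instead: a single CBZ on
// the object register branches to a slow path that raises the exception.
// Sketch, assuming the reference lives in w1:
//   cbz w1, <NullCheckSlowPath entry>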
void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

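// Incoming parameters are described relative to the caller's frame, so stack
// locations handed out by the parameter visitor are rebased here by adding the
// callee's frame size. For example (purely illustrative numbers): a parameter
// recorded at caller stack slot 8 with a 64-byte frame is accessed at [sp, #72].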
void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

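// ARM64 has no integer remainder instruction, so the visitor below computes
//   rem = dividend - (dividend / divisor) * divisor
// with an SDIV followed by an MSUB. A rough sketch for int operands in w1, w2:
//   sdiv w3, w1, w2
//   msub w0, w3, w2, w1     // w0 = w1 - w3 * w2
// Floating-point remainder has no single instruction either and is routed to
// the fmodf/fmod runtime entrypoints instead.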
void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register dividend = InputRegisterAt(rem, 0);
      Register divisor = InputRegisterAt(rem, 1);
      Register output = OutputRegister(rem);
      Register temp = temps.AcquireSameSizeAs(output);

      __ Sdiv(temp, dividend, divisor);
      __ Msub(output, temp, divisor, dividend);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

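// Local slots are sized by type: 32-bit values (including references and floats
// in this convention) take a single stack slot, while longs and doubles take a
// double slot. StoreLocal itself emits no code; the input is simply constrained
// to the local's stack slot so the register allocator places the value there.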
void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

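// A suspend check is only emitted here when it cannot be folded into other
// control flow: checks attached to a loop are emitted with the back edge, and a
// check in the entry block is emitted with the goto that follows it. Everything
// else falls through to GenerateSuspendCheck, which (not shown here) is
// expected to poll the thread's suspend flags and branch to a slow path.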
void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

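// Integral-to-integral conversions below are pure bitfield extracts: UBFX for
// zero extension (char results, and widenings from char) and SBFX for sign
// extension, over min(input, result) sized bits. The remaining cases map to
// single instructions. Rough sketches, with registers chosen arbitrarily:
//   int -> byte:    sbfx  w0, w1, #0, #8
//   int -> char:    ubfx  w0, w1, #0, #16
//   int -> float:   scvtf s0, w1
//   double -> long: fcvtzs x0, d1
//   float -> double: fcvt d0, s1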
void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art