/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;

static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

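// The slow path classes below emit out-of-line code for exceptional cases. The
// fast path branches to GetEntryLabel(); the slow path invokes the matching
// quick runtime entry point (saving and restoring live registers when the
// runtime call can return) and then either throws or branches back through
// GetExitLabel(). Within these classes `__` expands to the codegen's VIXL
// macro assembler (see the #define above).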
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}


  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

#undef __

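// Computes the location of the next managed-ABI argument of the given type:
// floating-point arguments use FP argument registers and all other arguments
// use core argument registers while registers remain; later arguments are
// passed on the stack. Note that stack space is reserved for every argument
// (one slot for 32-bit values, two for 64-bit values) even when it is passed
// in a register.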
Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

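// Method prologue. When a stack overflow check is needed, it first probes the
// address sp - GetStackOverflowReservedBytes(kArm64) with a load, relying on
// the fault handler to turn an access into the stack guard region into a
// StackOverflowError. Unless the frame is empty, the frame is then allocated
// with a single pre-indexed store of the current method at the new sp, and the
// callee-saved core and FP registers are spilled above it.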
void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //      sp[frame_size - 8]  : lr.
    //      ...                 : other preserved core registers.
    //      ...                 : other preserved fp registers.
    //      ...                 : reserved frame space.
    //      sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant()
      || instruction->IsLongConstant()
      || instruction->IsNullConstant()) {
    int64_t value = GetInt64ValueOf(instruction->AsConstant());
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK(((instruction->IsIntConstant() || instruction->IsNullConstant()) && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = (instruction->IsIntConstant() || instruction->IsNullConstant())
                      ? temps.AcquireW()
                      : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Primitive::Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

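// Write barrier for the concurrent GC: after a reference is stored into an
// object, mark the card covering that object so the GC can revisit it. The
// card address is card_table_base + (object >> kCardShift); the byte stored is
// the low byte of `card` (the card table base), which the runtime arranges to
// equal the dirty-card value. Marking is skipped when `value` is null.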
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline) const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (is_baseline) {
    CPURegList reserved_core_baseline_registers = callee_saved_core_registers;
    while (!reserved_core_baseline_registers.IsEmpty()) {
      blocked_core_registers_[reserved_core_baseline_registers.PopLowestIndex().code()] = true;
    }

    CPURegList reserved_fp_baseline_registers = callee_saved_fp_registers;
    while (!reserved_fp_baseline_registers.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_baseline_registers.PopLowestIndex().code()] = true;
    }
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

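// Emits an acquire load. The full address is first materialized into a scratch
// base register because the load-acquire instructions (Ldar and friends) do
// not accept an offset. Sub-word signed types are sign-extended after the
// load, and an implicit null check is recorded immediately after the load when
// applicable.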
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

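// Emits a store-release, mirroring LoadAcquire above: the address is folded
// into a scratch base register because Stlr takes no offset, and FP values are
// first moved to a core register because Stlr only operates on core registers.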
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(RequiresCurrentMethod());
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

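// Calls a quick runtime entry point: loads the entry point from the current
// Thread at `entry_point_offset`, branches to it through lr, and, when an
// instruction is provided, records the PC info needed for the stack map.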
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc, slow_path);
    DCHECK(instruction->IsSuspendCheck()
           || instruction->IsBoundsCheck()
           || instruction->IsNullCheck()
           || instruction->IsDivZeroCheck()
           || !IsLeafMethod());
  }
}

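// Checks that `class_reg` holds an initialized class, branching to `slow_path`
// otherwise. When acquire/release is preferred the status word is read with a
// load-acquire; otherwise a plain load is followed by a read barrier, because
// even a class that is already initialized must be observed with consistent
// memory ordering before its static fields are accessed.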
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (use_acquire_release) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

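// Emits a suspend check. The slow path is created lazily and cached on the
// HSuspendCheck. The fast path loads the 16-bit thread flags from the current
// Thread and branches to the slow path if any flag is set; with a successor
// block the check falls through to it, otherwise execution resumes at the slow
// path's return label.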
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  if (field_info.IsVolatile()) {
    if (use_acquire_release) {
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
1169 }
1170}
1171
1172void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
1173 const FieldInfo& field_info) {
1174 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001175 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001176
1177 Register obj = InputRegisterAt(instruction, 0);
1178 CPURegister value = InputCPURegisterAt(instruction, 1);
1179 Offset offset = field_info.GetFieldOffset();
1180 Primitive::Type field_type = field_info.GetFieldType();
1181 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1182
1183 if (field_info.IsVolatile()) {
1184 if (use_acquire_release) {
1185 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1186 codegen_->MaybeRecordImplicitNullCheck(instruction);
1187 } else {
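  // Acquire/release is not preferred here, so emulate the volatile store with explicit barriers around a plain store.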
1188 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1189 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1190 codegen_->MaybeRecordImplicitNullCheck(instruction);
1191 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1192 }
1193 } else {
1194 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1195 codegen_->MaybeRecordImplicitNullCheck(instruction);
1196 }
1197
1198 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
1199 codegen_->MarkGCCard(obj, Register(value));
1200 }
1201}
1202
Alexandre Rames67555f72014-11-18 10:55:16 +00001203void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001204 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001205
1206 switch (type) {
1207 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001208 case Primitive::kPrimLong: {
1209 Register dst = OutputRegister(instr);
1210 Register lhs = InputRegisterAt(instr, 0);
1211 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001212 if (instr->IsAdd()) {
1213 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001214 } else if (instr->IsAnd()) {
1215 __ And(dst, lhs, rhs);
1216 } else if (instr->IsOr()) {
1217 __ Orr(dst, lhs, rhs);
1218 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001219 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001220 } else {
1221 DCHECK(instr->IsXor());
1222 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001223 }
1224 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001225 }
1226 case Primitive::kPrimFloat:
1227 case Primitive::kPrimDouble: {
1228 FPRegister dst = OutputFPRegister(instr);
1229 FPRegister lhs = InputFPRegisterAt(instr, 0);
1230 FPRegister rhs = InputFPRegisterAt(instr, 1);
1231 if (instr->IsAdd()) {
1232 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001233 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001234 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001235 } else {
1236 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001237 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001238 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001239 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001240 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001241 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001242 }
1243}
1244
Serban Constantinescu02164b32014-11-13 14:05:07 +00001245void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1246 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1247
1248 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1249 Primitive::Type type = instr->GetResultType();
1250 switch (type) {
1251 case Primitive::kPrimInt:
1252 case Primitive::kPrimLong: {
1253 locations->SetInAt(0, Location::RequiresRegister());
1254 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1255 locations->SetOut(Location::RequiresRegister());
1256 break;
1257 }
1258 default:
1259 LOG(FATAL) << "Unexpected shift type " << type;
1260 }
1261}
1262
1263void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1264 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1265
1266 Primitive::Type type = instr->GetType();
1267 switch (type) {
1268 case Primitive::kPrimInt:
1269 case Primitive::kPrimLong: {
1270 Register dst = OutputRegister(instr);
1271 Register lhs = InputRegisterAt(instr, 0);
1272 Operand rhs = InputOperandAt(instr, 1);
1273 if (rhs.IsImmediate()) {
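  // Per Java semantics, the shift distance is masked to the width of the type (0-31 for int, 0-63 for long).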
1274 uint32_t shift_value = (type == Primitive::kPrimInt)
1275 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1276 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1277 if (instr->IsShl()) {
1278 __ Lsl(dst, lhs, shift_value);
1279 } else if (instr->IsShr()) {
1280 __ Asr(dst, lhs, shift_value);
1281 } else {
1282 __ Lsr(dst, lhs, shift_value);
1283 }
1284 } else {
1285 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1286
1287 if (instr->IsShl()) {
1288 __ Lsl(dst, lhs, rhs_reg);
1289 } else if (instr->IsShr()) {
1290 __ Asr(dst, lhs, rhs_reg);
1291 } else {
1292 __ Lsr(dst, lhs, rhs_reg);
1293 }
1294 }
1295 break;
1296 }
1297 default:
1298 LOG(FATAL) << "Unexpected shift operation type " << type;
1299 }
1300}
1301
Alexandre Rames5319def2014-10-23 10:03:10 +01001302void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001303 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001304}
1305
1306void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001307 HandleBinaryOp(instruction);
1308}
1309
1310void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1311 HandleBinaryOp(instruction);
1312}
1313
1314void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1315 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001316}
1317
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001318void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1319 LocationSummary* locations =
1320 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1321 locations->SetInAt(0, Location::RequiresRegister());
1322 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001323 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1324 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1325 } else {
1326 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1327 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001328}
1329
1330void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1331 LocationSummary* locations = instruction->GetLocations();
1332 Primitive::Type type = instruction->GetType();
1333 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001334 Location index = locations->InAt(1);
1335 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001336 MemOperand source = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001337 MacroAssembler* masm = GetVIXLAssembler();
1338 UseScratchRegisterScope temps(masm);
1339 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001340
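  // A constant index is folded into the offset; otherwise the element address is computed into a scratch register.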
1341 if (index.IsConstant()) {
1342 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001343 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001344 } else {
1345 Register temp = temps.AcquireSameSizeAs(obj);
1346 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1347 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001348 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001349 }
1350
Alexandre Rames67555f72014-11-18 10:55:16 +00001351 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001352 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001353}
1354
Alexandre Rames5319def2014-10-23 10:03:10 +01001355void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1356 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1357 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001358 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001359}
1360
1361void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001362 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001363 __ Ldr(OutputRegister(instruction),
1364 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001365 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001366}
1367
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001368void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Alexandre Rames97833a02015-04-16 15:07:12 +01001369 if (instruction->NeedsTypeCheck()) {
1370 LocationSummary* locations =
1371 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001372 InvokeRuntimeCallingConvention calling_convention;
1373 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1374 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1375 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1376 } else {
Alexandre Rames97833a02015-04-16 15:07:12 +01001377 LocationSummary* locations =
1378 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001379 locations->SetInAt(0, Location::RequiresRegister());
1380 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001381 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1382 locations->SetInAt(2, Location::RequiresFpuRegister());
1383 } else {
1384 locations->SetInAt(2, Location::RequiresRegister());
1385 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001386 }
1387}
1388
1389void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1390 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01001391 LocationSummary* locations = instruction->GetLocations();
1392 bool needs_runtime_call = locations->WillCall();
1393
1394 if (needs_runtime_call) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001395 codegen_->InvokeRuntime(
1396 QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001397 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001398 } else {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001399 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001400 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001401 Location index = locations->InAt(1);
1402 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001403 MemOperand destination = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001404 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001405 BlockPoolsScope block_pools(masm);
Alexandre Rames97833a02015-04-16 15:07:12 +01001406 {
1407 // We use a block to end the scratch scope before the write barrier, thus
1408 // freeing the temporary registers so they can be used in `MarkGCCard`.
1409 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001410
Alexandre Rames97833a02015-04-16 15:07:12 +01001411 if (index.IsConstant()) {
1412 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
1413 destination = HeapOperand(obj, offset);
1414 } else {
1415 Register temp = temps.AcquireSameSizeAs(obj);
1416 Register index_reg = InputRegisterAt(instruction, 1);
1417 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
1418 destination = HeapOperand(temp, offset);
1419 }
1420
1421 codegen_->Store(value_type, value, destination);
1422 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001423 }
Alexandre Rames97833a02015-04-16 15:07:12 +01001424 if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
1425 codegen_->MarkGCCard(obj, value.W());
1426 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001427 }
1428}
1429
Alexandre Rames67555f72014-11-18 10:55:16 +00001430void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1431 LocationSummary* locations =
1432 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1433 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00001434 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00001435 if (instruction->HasUses()) {
1436 locations->SetOut(Location::SameAsFirstInput());
1437 }
1438}
1439
1440void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001441 LocationSummary* locations = instruction->GetLocations();
1442 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1443 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001444 codegen_->AddSlowPath(slow_path);
1445
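  // An unsigned "higher or same" comparison catches both index >= length and negative indices, which wrap to large unsigned values.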
1446 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1447 __ B(slow_path->GetEntryLabel(), hs);
1448}
1449
1450void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1451 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1452 instruction, LocationSummary::kCallOnSlowPath);
1453 locations->SetInAt(0, Location::RequiresRegister());
1454 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001455 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001456}
1457
1458void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001459 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001460 Register obj = InputRegisterAt(instruction, 0);
1461 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001462 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001463
Alexandre Rames3e69f162014-12-10 10:36:50 +00001464 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1465 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001466 codegen_->AddSlowPath(slow_path);
1467
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01001468 // Avoid null check if we know obj is not null.
1469 if (instruction->MustDoNullCheck()) {
1470 __ Cbz(obj, slow_path->GetExitLabel());
1471 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001472 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001473 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1474 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001475 __ B(ne, slow_path->GetEntryLabel());
1476 __ Bind(slow_path->GetExitLabel());
1477}
1478
1479void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1480 LocationSummary* locations =
1481 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1482 locations->SetInAt(0, Location::RequiresRegister());
1483 if (check->HasUses()) {
1484 locations->SetOut(Location::SameAsFirstInput());
1485 }
1486}
1487
1488void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1489 // We assume the class is not null.
1490 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1491 check->GetLoadClass(), check, check->GetDexPc(), true);
1492 codegen_->AddSlowPath(slow_path);
1493 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1494}
1495
Serban Constantinescu02164b32014-11-13 14:05:07 +00001496void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001497 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001498 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1499 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001500 switch (in_type) {
1501 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001502 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001503 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001504 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1505 break;
1506 }
1507 case Primitive::kPrimFloat:
1508 case Primitive::kPrimDouble: {
1509 locations->SetInAt(0, Location::RequiresFpuRegister());
Alexandre Rames93415462015-02-17 15:08:20 +00001510 HInstruction* right = compare->InputAt(1);
1511 if ((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1512 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0))) {
1513 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1514 } else {
1515 locations->SetInAt(1, Location::RequiresFpuRegister());
1516 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001517 locations->SetOut(Location::RequiresRegister());
1518 break;
1519 }
1520 default:
1521 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1522 }
1523}
1524
1525void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1526 Primitive::Type in_type = compare->InputAt(0)->GetType();
1527
1528 // 0 if: left == right
1529 // 1 if: left > right
1530 // -1 if: left < right
1531 switch (in_type) {
1532 case Primitive::kPrimLong: {
1533 Register result = OutputRegister(compare);
1534 Register left = InputRegisterAt(compare, 0);
1535 Operand right = InputOperandAt(compare, 1);
1536
1537 __ Cmp(left, right);
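  // Cset yields 0 when equal and 1 otherwise; Cneg then flips the 1 to -1 when left < right.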
1538 __ Cset(result, ne);
1539 __ Cneg(result, result, lt);
1540 break;
1541 }
1542 case Primitive::kPrimFloat:
1543 case Primitive::kPrimDouble: {
1544 Register result = OutputRegister(compare);
1545 FPRegister left = InputFPRegisterAt(compare, 0);
Alexandre Rames93415462015-02-17 15:08:20 +00001546 if (compare->GetLocations()->InAt(1).IsConstant()) {
1547 if (kIsDebugBuild) {
1548 HInstruction* right = compare->GetLocations()->InAt(1).GetConstant();
1549 DCHECK((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1550 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0)));
1551 }
1552 // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
1553 __ Fcmp(left, 0.0);
1554 } else {
1555 __ Fcmp(left, InputFPRegisterAt(compare, 1));
1556 }
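  // An unordered (NaN) comparison takes the 'ne' path, so the bias decides whether it produces 1 (gt bias) or -1 (lt bias).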
Serban Constantinescu02164b32014-11-13 14:05:07 +00001557 if (compare->IsGtBias()) {
1558 __ Cset(result, ne);
1559 } else {
1560 __ Csetm(result, ne);
1561 }
1562 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001563 break;
1564 }
1565 default:
1566 LOG(FATAL) << "Unimplemented compare type " << in_type;
1567 }
1568}
1569
1570void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1571 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1572 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001573 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames5319def2014-10-23 10:03:10 +01001574 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001575 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001576 }
1577}
1578
1579void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1580 if (!instruction->NeedsMaterialization()) {
1581 return;
1582 }
1583
1584 LocationSummary* locations = instruction->GetLocations();
1585 Register lhs = InputRegisterAt(instruction, 0);
1586 Operand rhs = InputOperandAt(instruction, 1);
1587 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1588 Condition cond = ARM64Condition(instruction->GetCondition());
1589
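  // Materialize the condition as 0 or 1 in the output register.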
1590 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001591 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001592}
1593
1594#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1595 M(Equal) \
1596 M(NotEqual) \
1597 M(LessThan) \
1598 M(LessThanOrEqual) \
1599 M(GreaterThan) \
1600 M(GreaterThanOrEqual)
1601#define DEFINE_CONDITION_VISITORS(Name) \
1602void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1603void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1604FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001605#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001606#undef FOR_EACH_CONDITION_INSTRUCTION
1607
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001608void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1609 LocationSummary* locations =
1610 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1611 switch (div->GetResultType()) {
1612 case Primitive::kPrimInt:
1613 case Primitive::kPrimLong:
1614 locations->SetInAt(0, Location::RequiresRegister());
1615 locations->SetInAt(1, Location::RequiresRegister());
1616 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1617 break;
1618
1619 case Primitive::kPrimFloat:
1620 case Primitive::kPrimDouble:
1621 locations->SetInAt(0, Location::RequiresFpuRegister());
1622 locations->SetInAt(1, Location::RequiresFpuRegister());
1623 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1624 break;
1625
1626 default:
1627 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1628 }
1629}
1630
1631void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1632 Primitive::Type type = div->GetResultType();
1633 switch (type) {
1634 case Primitive::kPrimInt:
1635 case Primitive::kPrimLong:
1636 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1637 break;
1638
1639 case Primitive::kPrimFloat:
1640 case Primitive::kPrimDouble:
1641 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1642 break;
1643
1644 default:
1645 LOG(FATAL) << "Unexpected div type " << type;
1646 }
1647}
1648
Alexandre Rames67555f72014-11-18 10:55:16 +00001649void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1650 LocationSummary* locations =
1651 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1652 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1653 if (instruction->HasUses()) {
1654 locations->SetOut(Location::SameAsFirstInput());
1655 }
1656}
1657
1658void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1659 SlowPathCodeARM64* slow_path =
1660 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1661 codegen_->AddSlowPath(slow_path);
1662 Location value = instruction->GetLocations()->InAt(0);
1663
Alexandre Rames3e69f162014-12-10 10:36:50 +00001664 Primitive::Type type = instruction->GetType();
1665
1666 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1667 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1668 return;
1669 }
1670
Alexandre Rames67555f72014-11-18 10:55:16 +00001671 if (value.IsConstant()) {
1672 int64_t divisor = Int64ConstantFrom(value);
1673 if (divisor == 0) {
1674 __ B(slow_path->GetEntryLabel());
1675 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001676 // A division by a non-zero constant is valid. We don't need to perform
1677 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001678 }
1679 } else {
1680 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1681 }
1682}
1683
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001684void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1685 LocationSummary* locations =
1686 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1687 locations->SetOut(Location::ConstantLocation(constant));
1688}
1689
1690void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1691 UNUSED(constant);
1692 // Will be generated at use site.
1693}
1694
Alexandre Rames5319def2014-10-23 10:03:10 +01001695void LocationsBuilderARM64::VisitExit(HExit* exit) {
1696 exit->SetLocations(nullptr);
1697}
1698
1699void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001700 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001701}
1702
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001703void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1704 LocationSummary* locations =
1705 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1706 locations->SetOut(Location::ConstantLocation(constant));
1707}
1708
1709void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1710 UNUSED(constant);
1711 // Will be generated at use site.
1712}
1713
Alexandre Rames5319def2014-10-23 10:03:10 +01001714void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1715 got->SetLocations(nullptr);
1716}
1717
1718void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1719 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001720 DCHECK(!successor->IsExitBlock());
1721 HBasicBlock* block = got->GetBlock();
1722 HInstruction* previous = got->GetPrevious();
1723 HLoopInformation* info = block->GetLoopInformation();
1724
David Brazdil46e2a392015-03-16 17:31:52 +00001725 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001726 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1727 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1728 return;
1729 }
1730 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1731 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1732 }
1733 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001734 __ B(codegen_->GetLabelOf(successor));
1735 }
1736}
1737
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001738void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
1739 vixl::Label* true_target,
1740 vixl::Label* false_target,
1741 vixl::Label* always_true_target) {
1742 HInstruction* cond = instruction->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001743 HCondition* condition = cond->AsCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01001744
Serban Constantinescu02164b32014-11-13 14:05:07 +00001745 if (cond->IsIntConstant()) {
1746 int32_t cond_value = cond->AsIntConstant()->GetValue();
1747 if (cond_value == 1) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001748 if (always_true_target != nullptr) {
1749 __ B(always_true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001750 }
1751 return;
1752 } else {
1753 DCHECK_EQ(cond_value, 0);
1754 }
1755 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001756 // The condition instruction has been materialized; compare its output to 0.
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001757 Location cond_val = instruction->GetLocations()->InAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001758 DCHECK(cond_val.IsRegister());
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001759 __ Cbnz(InputRegisterAt(instruction, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001760 } else {
1761 // The condition instruction has not been materialized; use its inputs as
1762 // the comparison and its condition as the branch condition.
1763 Register lhs = InputRegisterAt(condition, 0);
1764 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001765 Condition arm64_cond = ARM64Condition(condition->GetCondition());
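  // Comparisons against zero can use cbz/cbnz (eq/ne) or a sign-bit test with tbz/tbnz (lt/ge); gt and le still need a full compare.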
Alexandre Rames4388dcc2015-02-03 10:28:33 +00001766 if ((arm64_cond != gt && arm64_cond != le) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1767 switch (arm64_cond) {
1768 case eq:
1769 __ Cbz(lhs, true_target);
1770 break;
1771 case ne:
1772 __ Cbnz(lhs, true_target);
1773 break;
1774 case lt:
1775 // Test the sign bit and branch accordingly.
1776 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1777 break;
1778 case ge:
1779 // Test the sign bit and branch accordingly.
1780 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1781 break;
1782 default:
1783 // Without the `static_cast` the compiler throws an error for
1784 // `-Werror=sign-promo`.
1785 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001786 }
1787 } else {
1788 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001789 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001790 }
1791 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001792 if (false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001793 __ B(false_target);
1794 }
1795}
1796
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001797void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1798 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1799 HInstruction* cond = if_instr->InputAt(0);
1800 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
1801 locations->SetInAt(0, Location::RequiresRegister());
1802 }
1803}
1804
1805void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1806 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1807 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1808 vixl::Label* always_true_target = true_target;
1809 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1810 if_instr->IfTrueSuccessor())) {
1811 always_true_target = nullptr;
1812 }
1813 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1814 if_instr->IfFalseSuccessor())) {
1815 false_target = nullptr;
1816 }
1817 GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1818}
1819
1820void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1821 LocationSummary* locations = new (GetGraph()->GetArena())
1822 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
1823 HInstruction* cond = deoptimize->InputAt(0);
1824 DCHECK(cond->IsCondition());
1825 if (cond->AsCondition()->NeedsMaterialization()) {
1826 locations->SetInAt(0, Location::RequiresRegister());
1827 }
1828}
1829
1830void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1831 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
1832 DeoptimizationSlowPathARM64(deoptimize);
1833 codegen_->AddSlowPath(slow_path);
1834 vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
1835 GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1836}
1837
Alexandre Rames5319def2014-10-23 10:03:10 +01001838void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001839 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001840}
1841
1842void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001843 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001844}
1845
1846void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001847 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001848}
1849
1850void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001851 HandleFieldSet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001852}
1853
Alexandre Rames67555f72014-11-18 10:55:16 +00001854void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1855 LocationSummary::CallKind call_kind =
1856 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1857 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1858 locations->SetInAt(0, Location::RequiresRegister());
1859 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001860 // The output does overlap the inputs.
1861 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00001862}
1863
1864void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1865 LocationSummary* locations = instruction->GetLocations();
1866 Register obj = InputRegisterAt(instruction, 0);
1867 Register cls = InputRegisterAt(instruction, 1);
1868 Register out = OutputRegister(instruction);
1869
1870 vixl::Label done;
1871
1872 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01001873 // Avoid null check if we know `obj` is not null.
1874 if (instruction->MustDoNullCheck()) {
1875 __ Mov(out, 0);
1876 __ Cbz(obj, &done);
1877 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001878
1879 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001880 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001881 __ Cmp(out, cls);
1882 if (instruction->IsClassFinal()) {
1883 // Classes must be equal for the instanceof to succeed.
1884 __ Cset(out, eq);
1885 } else {
1886 // If the classes are not equal, we go into a slow path.
1887 DCHECK(locations->OnlyCallsOnSlowPath());
1888 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001889 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1890 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001891 codegen_->AddSlowPath(slow_path);
1892 __ B(ne, slow_path->GetEntryLabel());
1893 __ Mov(out, 1);
1894 __ Bind(slow_path->GetExitLabel());
1895 }
1896
1897 __ Bind(&done);
1898}
1899
Alexandre Rames5319def2014-10-23 10:03:10 +01001900void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1901 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1902 locations->SetOut(Location::ConstantLocation(constant));
1903}
1904
1905void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1906 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001907 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001908}
1909
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001910void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
1911 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1912 locations->SetOut(Location::ConstantLocation(constant));
1913}
1914
1915void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
1916 // Will be generated at use site.
1917 UNUSED(constant);
1918}
1919
Alexandre Rames5319def2014-10-23 10:03:10 +01001920void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1921 LocationSummary* locations =
1922 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1923 locations->AddTemp(LocationFrom(x0));
1924
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001925 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Roland Levillain3e3d7332015-04-28 11:00:54 +01001926 for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001927 HInstruction* input = invoke->InputAt(i);
1928 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1929 }
1930
1931 Primitive::Type return_type = invoke->GetType();
1932 if (return_type != Primitive::kPrimVoid) {
1933 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1934 }
1935}
1936
Alexandre Rames67555f72014-11-18 10:55:16 +00001937void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1938 HandleInvoke(invoke);
1939}
1940
1941void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1942 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1943 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1944 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1945 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1946 Location receiver = invoke->GetLocations()->InAt(0);
1947 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001948 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001949
1950 // The register ip1 is required to be used for the hidden argument in
1951 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01001952 MacroAssembler* masm = GetVIXLAssembler();
1953 UseScratchRegisterScope scratch_scope(masm);
1954 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00001955 scratch_scope.Exclude(ip1);
1956 __ Mov(ip1, invoke->GetDexMethodIndex());
1957
1958 // temp = object->GetClass();
1959 if (receiver.IsStackSlot()) {
1960 __ Ldr(temp, StackOperandFrom(receiver));
1961 __ Ldr(temp, HeapOperand(temp, class_offset));
1962 } else {
1963 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1964 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001965 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00001966 // temp = temp->GetImtEntryAt(method_offset);
1967 __ Ldr(temp, HeapOperand(temp, method_offset));
1968 // lr = temp->GetEntryPoint();
1969 __ Ldr(lr, HeapOperand(temp, entry_point));
1970 // lr();
1971 __ Blr(lr);
1972 DCHECK(!codegen_->IsLeafMethod());
1973 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1974}
1975
1976void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001977 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1978 if (intrinsic.TryDispatch(invoke)) {
1979 return;
1980 }
1981
Alexandre Rames67555f72014-11-18 10:55:16 +00001982 HandleInvoke(invoke);
1983}
1984
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001985void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01001986 // When we do not run baseline, explicit clinit checks triggered by static
1987 // invokes must have been pruned by art::PrepareForRegisterAllocation.
1988 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001989
Andreas Gampe878d58c2015-01-15 23:24:00 -08001990 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1991 if (intrinsic.TryDispatch(invoke)) {
1992 return;
1993 }
1994
Alexandre Rames67555f72014-11-18 10:55:16 +00001995 HandleInvoke(invoke);
1996}
1997
Andreas Gampe878d58c2015-01-15 23:24:00 -08001998static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
1999 if (invoke->GetLocations()->Intrinsified()) {
2000 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
2001 intrinsic.Dispatch(invoke);
2002 return true;
2003 }
2004 return false;
2005}
2006
2007void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
2008 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
2009 DCHECK(temp.Is(kArtMethodRegister));
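  // Byte offset of the resolved target within the dex cache resolved-methods array.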
Alexandre Rames5319def2014-10-23 10:03:10 +01002010 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08002011 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01002012
2013 // TODO: Implement all kinds of calls:
2014 // 1) boot -> boot
2015 // 2) app -> boot
2016 // 3) app -> app
2017 //
2018 // Currently we implement the app -> app logic, which looks up in the resolve cache.
2019
Jeff Hao848f70a2014-01-15 13:49:50 -08002020 if (invoke->IsStringInit()) {
2021 // temp = thread->string_init_entrypoint
2022 __ Ldr(temp, HeapOperand(tr, invoke->GetStringInitOffset()));
2023 // LR = temp->entry_point_from_quick_compiled_code_;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002024 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2025 kArm64WordSize)));
Jeff Hao848f70a2014-01-15 13:49:50 -08002026 // lr()
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002027 __ Blr(lr);
2028 } else {
Jeff Hao848f70a2014-01-15 13:49:50 -08002029 // temp = method;
2030 LoadCurrentMethod(temp);
2031 if (!invoke->IsRecursive()) {
2032 // temp = temp->dex_cache_resolved_methods_;
2033 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
2034 // temp = temp[index_in_cache];
2035 __ Ldr(temp, HeapOperand(temp, index_in_cache));
2036 // lr = temp->entry_point_from_quick_compiled_code_;
2037 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2038 kArm64WordSize)));
2039 // lr();
2040 __ Blr(lr);
2041 } else {
2042 __ Bl(&frame_entry_label_);
2043 }
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002044 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002045
Andreas Gampe878d58c2015-01-15 23:24:00 -08002046 DCHECK(!IsLeafMethod());
2047}
2048
2049void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002050 // When we do not run baseline, explicit clinit checks triggered by static
2051 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2052 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002053
Andreas Gampe878d58c2015-01-15 23:24:00 -08002054 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2055 return;
2056 }
2057
Alexandre Ramesd921d642015-04-16 15:07:16 +01002058 BlockPoolsScope block_pools(GetVIXLAssembler());
Andreas Gampe878d58c2015-01-15 23:24:00 -08002059 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2060 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002061 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002062}
2063
2064void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002065 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2066 return;
2067 }
2068
Alexandre Rames5319def2014-10-23 10:03:10 +01002069 LocationSummary* locations = invoke->GetLocations();
2070 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002071 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002072 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2073 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2074 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002075 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002076
Alexandre Ramesd921d642015-04-16 15:07:16 +01002077 BlockPoolsScope block_pools(GetVIXLAssembler());
2078
Alexandre Rames5319def2014-10-23 10:03:10 +01002079 // temp = object->GetClass();
2080 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002081 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2082 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002083 } else {
2084 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002085 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002086 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002087 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01002088 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002089 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002090 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002091 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002092 // lr();
2093 __ Blr(lr);
2094 DCHECK(!codegen_->IsLeafMethod());
2095 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2096}
2097
Alexandre Rames67555f72014-11-18 10:55:16 +00002098void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2099 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2100 : LocationSummary::kNoCall;
2101 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2102 locations->SetOut(Location::RequiresRegister());
2103}
2104
2105void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2106 Register out = OutputRegister(cls);
2107 if (cls->IsReferrersClass()) {
2108 DCHECK(!cls->CanCallRuntime());
2109 DCHECK(!cls->MustGenerateClinitCheck());
2110 codegen_->LoadCurrentMethod(out);
2111 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2112 } else {
2113 DCHECK(cls->CanCallRuntime());
2114 codegen_->LoadCurrentMethod(out);
2115 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002116 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002117
2118 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2119 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2120 codegen_->AddSlowPath(slow_path);
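  // A null cache entry means the type is not resolved yet; the slow path resolves it and, if required, runs the initialization check.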
2121 __ Cbz(out, slow_path->GetEntryLabel());
2122 if (cls->MustGenerateClinitCheck()) {
2123 GenerateClassInitializationCheck(slow_path, out);
2124 } else {
2125 __ Bind(slow_path->GetExitLabel());
2126 }
2127 }
2128}
2129
2130void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2131 LocationSummary* locations =
2132 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2133 locations->SetOut(Location::RequiresRegister());
2134}
2135
2136void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2137 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2138 __ Ldr(OutputRegister(instruction), exception);
2139 __ Str(wzr, exception);
2140}
2141
Alexandre Rames5319def2014-10-23 10:03:10 +01002142void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2143 load->SetLocations(nullptr);
2144}
2145
2146void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2147 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002148 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002149}
2150
Alexandre Rames67555f72014-11-18 10:55:16 +00002151void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2152 LocationSummary* locations =
2153 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2154 locations->SetOut(Location::RequiresRegister());
2155}
2156
2157void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2158 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2159 codegen_->AddSlowPath(slow_path);
2160
2161 Register out = OutputRegister(load);
2162 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002163 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2164 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002165 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
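  // A null cache entry means the String is not resolved yet; resolve it on the slow path.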
Alexandre Rames67555f72014-11-18 10:55:16 +00002166 __ Cbz(out, slow_path->GetEntryLabel());
2167 __ Bind(slow_path->GetExitLabel());
2168}
2169
Alexandre Rames5319def2014-10-23 10:03:10 +01002170void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2171 local->SetLocations(nullptr);
2172}
2173
2174void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2175 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2176}
2177
2178void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2179 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2180 locations->SetOut(Location::ConstantLocation(constant));
2181}
2182
2183void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2184 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002185 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002186}
2187
Alexandre Rames67555f72014-11-18 10:55:16 +00002188void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2189 LocationSummary* locations =
2190 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2191 InvokeRuntimeCallingConvention calling_convention;
2192 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2193}
2194
2195void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2196 codegen_->InvokeRuntime(instruction->IsEnter()
2197 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2198 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002199 instruction->GetDexPc(),
2200 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002201 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002202}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w2));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}
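
// Note on the calling convention set up above: the type index is materialized in w0, the array
// length (input 0) arrives in w1, and the current ArtMethod* is loaded into w2, matching the
// (uint32_t, int32_t, ArtMethod*) signature verified by CheckEntrypointTypes. The new array is
// returned in x0, which is why the output location is pinned to x0.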

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}
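
// Note: object allocation follows the same pattern as array allocation above, minus the length
// argument: the type index goes in w0, the current ArtMethod* in w1, and the freshly allocated
// object comes back in the runtime's return location for kPrimNot.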

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}
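
// Note: inputs of HBooleanNot are expected to be 0 or 1, so negation is a single
//   eor wOut, wIn, #1
// which flips the low bit (register names are illustrative, chosen by the allocator).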

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
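
// Sketch of the implicit variant: a load of offset 0 into the zero register, e.g.
//   ldr wzr, [wObj, #0]
// faults when the object register holds null; the runtime's fault handler turns the fault into
// a NullPointerException, and RecordPcInfo ties the faulting PC back to the dex pc. The wObj
// name is illustrative; the actual register comes from the instruction's first input location.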

void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}
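
// Sketch of the explicit variant:
//   cbz wObj, NullCheckSlowPath_entry
// i.e. a compare-and-branch on the object register into NullCheckSlowPathARM64, which raises
// the exception through the runtime. Again, wObj stands for whatever register holds input 0.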

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register dividend = InputRegisterAt(rem, 0);
      Register divisor = InputRegisterAt(rem, 1);
      Register output = OutputRegister(rem);
      Register temp = temps.AcquireSameSizeAs(output);

      __ Sdiv(temp, dividend, divisor);
      __ Msub(output, temp, divisor, dividend);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
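
// The integer path above implements remainder with the usual divide-and-multiply-subtract pair,
// since A64 has no remainder instruction:
//   sdiv temp, dividend, divisor        // temp = dividend / divisor (rounded toward zero)
//   msub out,  temp, divisor, dividend  // out  = dividend - temp * divisor
// The floating-point path instead calls fmodf/fmod through the quick entrypoints.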

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
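
// Summary of the lowering above, with a few illustrative cases:
//   int -> short : sbfx (sign-extend the low 16 bits)
//   int -> char  : ubfx (zero-extend the low 16 bits)
//   int -> long  : sbfx over the low 32 bits of the X output
//   int -> float : scvtf ; float/double -> int/long : fcvtzs ; float <-> double : fcvt
// The exact bit-width operands are computed from Primitive::ComponentSize as shown.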

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art