Alexandre Rames5319def2014-10-23 10:03:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe1cc7dba2014-12-17 18:43:01 -080020#include "entrypoints/quick/quick_entrypoints_enum.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010021#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
Calin Juravlecd6dffe2015-01-08 17:35:35 +000025#include "offsets.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010026#include "thread.h"
27#include "utils/arm64/assembler_arm64.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31
32using namespace vixl; // NOLINT(build/namespaces)
33
34#ifdef __
35#error "ARM64 Codegen VIXL macro-assembler macro already defined."
36#endif
37
38
39namespace art {
40
41namespace arm64 {
42
Serban Constantinescu02d81cc2015-01-05 16:08:49 +000043// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
44// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
45static constexpr bool kUseAcquireRelease = true;
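// When kUseAcquireRelease is true, code that needs ordering (e.g.
// GenerateClassInitializationCheck below) uses the LoadAcquire/StoreRelease
// sequences (ldar/stlr); when it is false, plain loads and stores are combined
// with explicit dmb barriers emitted by GenerateMemoryBarrier.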
Serban Constantinescu02164b32014-11-13 14:05:07 +000046static constexpr bool kExplicitStackOverflowCheck = false;
Alexandre Rames5319def2014-10-23 10:03:10 +010047static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
48static constexpr int kCurrentMethodStackOffset = 0;
49
50namespace {
Alexandre Ramesa89086e2014-11-07 17:13:25 +000051
52bool IsFPType(Primitive::Type type) {
53 return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
54}
55
Alexandre Rames67555f72014-11-18 10:55:16 +000056bool IsIntegralType(Primitive::Type type) {
57 switch (type) {
58 case Primitive::kPrimByte:
59 case Primitive::kPrimChar:
60 case Primitive::kPrimShort:
61 case Primitive::kPrimInt:
62 case Primitive::kPrimLong:
63 return true;
64 default:
65 return false;
66 }
67}
68
Alexandre Ramesa89086e2014-11-07 17:13:25 +000069bool Is64BitType(Primitive::Type type) {
70 return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
71}
72
Alexandre Rames5319def2014-10-23 10:03:10 +010073// Convenience helpers to ease conversion to and from VIXL operands.
Alexandre Rames67555f72014-11-18 10:55:16 +000074static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
75 "Unexpected values for register codes.");
Alexandre Rames5319def2014-10-23 10:03:10 +010076
77int VIXLRegCodeFromART(int code) {
Alexandre Rames5319def2014-10-23 10:03:10 +010078 if (code == SP) {
79 return vixl::kSPRegInternalCode;
80 }
81 if (code == XZR) {
82 return vixl::kZeroRegCode;
83 }
84 return code;
85}
86
87int ARTRegCodeFromVIXL(int code) {
Alexandre Rames5319def2014-10-23 10:03:10 +010088 if (code == vixl::kSPRegInternalCode) {
89 return SP;
90 }
91 if (code == vixl::kZeroRegCode) {
92 return XZR;
93 }
94 return code;
95}
96
97Register XRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +000098 DCHECK(location.IsRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +010099 return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
100}
101
102Register WRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000103 DCHECK(location.IsRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +0100104 return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
105}
106
107Register RegisterFrom(Location location, Primitive::Type type) {
108 DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
109 return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
110}
111
112Register OutputRegister(HInstruction* instr) {
113 return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
114}
115
116Register InputRegisterAt(HInstruction* instr, int input_index) {
117 return RegisterFrom(instr->GetLocations()->InAt(input_index),
118 instr->InputAt(input_index)->GetType());
119}
120
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000121FPRegister DRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000122 DCHECK(location.IsFpuRegister());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000123 return FPRegister::DRegFromCode(location.reg());
124}
125
126FPRegister SRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000127 DCHECK(location.IsFpuRegister());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000128 return FPRegister::SRegFromCode(location.reg());
129}
130
131FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
132 DCHECK(IsFPType(type));
133 return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
134}
135
136FPRegister OutputFPRegister(HInstruction* instr) {
137 return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
138}
139
140FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
141 return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
142 instr->InputAt(input_index)->GetType());
143}
144
Alexandre Rames3e69f162014-12-10 10:36:50 +0000145CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
146 return IsFPType(type) ? CPURegister(FPRegisterFrom(location, type))
147 : CPURegister(RegisterFrom(location, type));
148}
149
Alexandre Rames67555f72014-11-18 10:55:16 +0000150CPURegister OutputCPURegister(HInstruction* instr) {
151 return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr))
152 : static_cast<CPURegister>(OutputRegister(instr));
153}
154
155CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
156 return IsFPType(instr->InputAt(index)->GetType())
157 ? static_cast<CPURegister>(InputFPRegisterAt(instr, index))
158 : static_cast<CPURegister>(InputRegisterAt(instr, index));
159}
160
Alexandre Rames5319def2014-10-23 10:03:10 +0100161int64_t Int64ConstantFrom(Location location) {
162 HConstant* instr = location.GetConstant();
163 return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
164 : instr->AsLongConstant()->GetValue();
165}
166
167Operand OperandFrom(Location location, Primitive::Type type) {
168 if (location.IsRegister()) {
169 return Operand(RegisterFrom(location, type));
170 } else {
171 return Operand(Int64ConstantFrom(location));
172 }
173}
174
175Operand InputOperandAt(HInstruction* instr, int input_index) {
176 return OperandFrom(instr->GetLocations()->InAt(input_index),
177 instr->InputAt(input_index)->GetType());
178}
179
180MemOperand StackOperandFrom(Location location) {
181 return MemOperand(sp, location.GetStackIndex());
182}
183
Serban Constantinescu02164b32014-11-13 14:05:07 +0000184MemOperand HeapOperand(const Register& base, size_t offset = 0) {
Alexandre Rames5319def2014-10-23 10:03:10 +0100185 // A heap reference must be 32bit, so it fits in a W register.
186 DCHECK(base.IsW());
Alexandre Rames67555f72014-11-18 10:55:16 +0000187 return MemOperand(base.X(), offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100188}
189
Alexandre Rames67555f72014-11-18 10:55:16 +0000190MemOperand HeapOperand(const Register& base, Offset offset) {
191 return HeapOperand(base, offset.SizeValue());
192}
193
194MemOperand HeapOperandFrom(Location location, Offset offset) {
195 return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100196}
197
198Location LocationFrom(const Register& reg) {
199 return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
200}
201
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000202Location LocationFrom(const FPRegister& fpreg) {
203 return Location::FpuRegisterLocation(fpreg.code());
204}
205
Alexandre Rames5319def2014-10-23 10:03:10 +0100206} // namespace
207
208inline Condition ARM64Condition(IfCondition cond) {
209 switch (cond) {
210 case kCondEQ: return eq;
211 case kCondNE: return ne;
212 case kCondLT: return lt;
213 case kCondLE: return le;
214 case kCondGT: return gt;
215 case kCondGE: return ge;
216 default:
217 LOG(FATAL) << "Unknown if condition";
218 }
219 return nv; // Unreachable.
220}
221
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000222Location ARM64ReturnLocation(Primitive::Type return_type) {
223 DCHECK_NE(return_type, Primitive::kPrimVoid);
224 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
225 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
226 // but we use the exact registers for clarity.
227 if (return_type == Primitive::kPrimFloat) {
228 return LocationFrom(s0);
229 } else if (return_type == Primitive::kPrimDouble) {
230 return LocationFrom(d0);
231 } else if (return_type == Primitive::kPrimLong) {
232 return LocationFrom(x0);
233 } else {
234 return LocationFrom(w0);
235 }
236}
237
Alexandre Rames5319def2014-10-23 10:03:10 +0100238static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
239static constexpr size_t kRuntimeParameterCoreRegistersLength =
240 arraysize(kRuntimeParameterCoreRegisters);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000241static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
242static constexpr size_t kRuntimeParameterFpuRegistersLength =
 243 arraysize(kRuntimeParameterFpuRegisters);
Alexandre Rames5319def2014-10-23 10:03:10 +0100244
245class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
246 public:
247 static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
248
249 InvokeRuntimeCallingConvention()
250 : CallingConvention(kRuntimeParameterCoreRegisters,
251 kRuntimeParameterCoreRegistersLength,
252 kRuntimeParameterFpuRegisters,
253 kRuntimeParameterFpuRegistersLength) {}
254
255 Location GetReturnLocation(Primitive::Type return_type);
256
257 private:
258 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
259};
260
261Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000262 return ARM64ReturnLocation(return_type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100263}
264
Alexandre Rames67555f72014-11-18 10:55:16 +0000265#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
266#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
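// In the slow path classes below, `__` expands to the VIXL macro assembler of
// the CodeGenerator passed to EmitNativeCode, and QUICK_ENTRY_POINT yields the
// Thread-relative offset of a quick runtime entrypoint, which
// CodeGeneratorARM64::InvokeRuntime loads from `tr` and calls through `lr`.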
Alexandre Rames5319def2014-10-23 10:03:10 +0100267
268class SlowPathCodeARM64 : public SlowPathCode {
269 public:
270 SlowPathCodeARM64() : entry_label_(), exit_label_() {}
271
272 vixl::Label* GetEntryLabel() { return &entry_label_; }
273 vixl::Label* GetExitLabel() { return &exit_label_; }
274
275 private:
276 vixl::Label entry_label_;
277 vixl::Label exit_label_;
278
279 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
280};
281
282class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
283 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000284 BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
285 Location index_location,
286 Location length_location)
287 : instruction_(instruction),
288 index_location_(index_location),
289 length_location_(length_location) {}
290
Alexandre Rames5319def2014-10-23 10:03:10 +0100291
Alexandre Rames67555f72014-11-18 10:55:16 +0000292 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000293 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100294 __ Bind(GetEntryLabel());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000295 // We're moving two locations to locations that could overlap, so we need a parallel
296 // move resolver.
297 InvokeRuntimeCallingConvention calling_convention;
298 codegen->EmitParallelMoves(
299 index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
300 length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
301 arm64_codegen->InvokeRuntime(
302 QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800303 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100304 }
305
306 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000307 HBoundsCheck* const instruction_;
308 const Location index_location_;
309 const Location length_location_;
310
Alexandre Rames5319def2014-10-23 10:03:10 +0100311 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
312};
313
Alexandre Rames67555f72014-11-18 10:55:16 +0000314class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
315 public:
316 explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}
317
318 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
319 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
320 __ Bind(GetEntryLabel());
321 arm64_codegen->InvokeRuntime(
322 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800323 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000324 }
325
326 private:
327 HDivZeroCheck* const instruction_;
328 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
329};
330
331class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
332 public:
333 LoadClassSlowPathARM64(HLoadClass* cls,
334 HInstruction* at,
335 uint32_t dex_pc,
336 bool do_clinit)
337 : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
338 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
339 }
340
341 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
342 LocationSummary* locations = at_->GetLocations();
343 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
344
345 __ Bind(GetEntryLabel());
346 codegen->SaveLiveRegisters(locations);
347
348 InvokeRuntimeCallingConvention calling_convention;
349 __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
350 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
351 int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
352 : QUICK_ENTRY_POINT(pInitializeType);
353 arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800354 if (do_clinit_) {
355 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
356 } else {
357 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
358 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000359
360 // Move the class to the desired location.
361 Location out = locations->Out();
362 if (out.IsValid()) {
363 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
364 Primitive::Type type = at_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000365 arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000366 }
367
368 codegen->RestoreLiveRegisters(locations);
369 __ B(GetExitLabel());
370 }
371
372 private:
373 // The class this slow path will load.
374 HLoadClass* const cls_;
375
376 // The instruction where this slow path is happening.
377 // (Might be the load class or an initialization check).
378 HInstruction* const at_;
379
380 // The dex PC of `at_`.
381 const uint32_t dex_pc_;
382
383 // Whether to initialize the class.
384 const bool do_clinit_;
385
386 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
387};
388
389class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
390 public:
391 explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}
392
393 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
394 LocationSummary* locations = instruction_->GetLocations();
395 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
396 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
397
398 __ Bind(GetEntryLabel());
399 codegen->SaveLiveRegisters(locations);
400
401 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800402 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
403 __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +0000404 arm64_codegen->InvokeRuntime(
405 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800406 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000407 Primitive::Type type = instruction_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000408 arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000409
410 codegen->RestoreLiveRegisters(locations);
411 __ B(GetExitLabel());
412 }
413
414 private:
415 HLoadString* const instruction_;
416
417 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
418};
419
Alexandre Rames5319def2014-10-23 10:03:10 +0100420class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
421 public:
422 explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}
423
Alexandre Rames67555f72014-11-18 10:55:16 +0000424 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
425 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100426 __ Bind(GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000427 arm64_codegen->InvokeRuntime(
428 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800429 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100430 }
431
432 private:
433 HNullCheck* const instruction_;
434
435 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
436};
437
Serban Constantinescu02164b32014-11-13 14:05:07 +0000438class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
439 public:
440 StackOverflowCheckSlowPathARM64() {}
441
 442 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
443 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
444 __ Bind(GetEntryLabel());
445 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800446 CheckEntrypointTypes<kQuickThrowStackOverflow, void, void>();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000447 }
448
449 private:
450 DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM64);
451};
452
Alexandre Rames5319def2014-10-23 10:03:10 +0100453class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
454 public:
455 explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
456 HBasicBlock* successor)
457 : instruction_(instruction), successor_(successor) {}
458
Alexandre Rames67555f72014-11-18 10:55:16 +0000459 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
460 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100461 __ Bind(GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000462 codegen->SaveLiveRegisters(instruction_->GetLocations());
463 arm64_codegen->InvokeRuntime(
464 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800465 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000466 codegen->RestoreLiveRegisters(instruction_->GetLocations());
467 if (successor_ == nullptr) {
468 __ B(GetReturnLabel());
469 } else {
470 __ B(arm64_codegen->GetLabelOf(successor_));
471 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100472 }
473
474 vixl::Label* GetReturnLabel() {
475 DCHECK(successor_ == nullptr);
476 return &return_label_;
477 }
478
Alexandre Rames5319def2014-10-23 10:03:10 +0100479 private:
480 HSuspendCheck* const instruction_;
481 // If not null, the block to branch to after the suspend check.
482 HBasicBlock* const successor_;
483
484 // If `successor_` is null, the label to branch to after the suspend check.
485 vixl::Label return_label_;
486
487 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
488};
489
Alexandre Rames67555f72014-11-18 10:55:16 +0000490class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
491 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000492 TypeCheckSlowPathARM64(HInstruction* instruction,
493 Location class_to_check,
494 Location object_class,
495 uint32_t dex_pc)
496 : instruction_(instruction),
497 class_to_check_(class_to_check),
498 object_class_(object_class),
499 dex_pc_(dex_pc) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000500
501 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000502 LocationSummary* locations = instruction_->GetLocations();
503 DCHECK(instruction_->IsCheckCast()
504 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
505 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
506
Alexandre Rames67555f72014-11-18 10:55:16 +0000507 __ Bind(GetEntryLabel());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000508 codegen->SaveLiveRegisters(locations);
509
510 // We're moving two locations to locations that could overlap, so we need a parallel
511 // move resolver.
512 InvokeRuntimeCallingConvention calling_convention;
513 codegen->EmitParallelMoves(
514 class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
515 object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));
516
517 if (instruction_->IsInstanceOf()) {
518 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
519 Primitive::Type ret_type = instruction_->GetType();
520 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
521 arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800522 CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
523 const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000524 } else {
525 DCHECK(instruction_->IsCheckCast());
526 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800527 CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000528 }
529
530 codegen->RestoreLiveRegisters(locations);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000531 __ B(GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000532 }
533
534 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000535 HInstruction* const instruction_;
536 const Location class_to_check_;
537 const Location object_class_;
538 uint32_t dex_pc_;
539
Alexandre Rames67555f72014-11-18 10:55:16 +0000540 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
541};
542
Alexandre Rames5319def2014-10-23 10:03:10 +0100543#undef __
544
545Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
546 Location next_location;
547 if (type == Primitive::kPrimVoid) {
548 LOG(FATAL) << "Unreachable type " << type;
549 }
550
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000551 if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
552 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
553 } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
554 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
555 } else {
556 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
557 next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
558 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100559 }
560
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000561 // Space on the stack is reserved for all arguments.
562 stack_index_ += Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100563 return next_location;
564}
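// Illustrative example (not part of the original source): for a signature
// (int, float, long), the int takes the next core register (used as W), the
// float takes the next FP register (used as S), and the long takes the
// following core register (used as X); stack_index_ still advances by 1, 1 and
// 2 respectively, because stack space is reserved for every argument.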
565
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000566CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options)
Alexandre Rames5319def2014-10-23 10:03:10 +0100567 : CodeGenerator(graph,
568 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000569 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000570 kNumberOfAllocatableRegisterPairs,
571 compiler_options),
Alexandre Rames5319def2014-10-23 10:03:10 +0100572 block_labels_(nullptr),
573 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000574 instruction_visitor_(graph, this),
575 move_resolver_(graph->GetArena(), this) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100576
Alexandre Rames67555f72014-11-18 10:55:16 +0000577#undef __
578#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100579
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000580void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
581 // Ensure we emit the literal pool.
582 __ FinalizeCode();
583 CodeGenerator::Finalize(allocator);
584}
585
Alexandre Rames3e69f162014-12-10 10:36:50 +0000586void ParallelMoveResolverARM64::EmitMove(size_t index) {
587 MoveOperands* move = moves_.Get(index);
588 codegen_->MoveLocation(move->GetDestination(), move->GetSource());
589}
590
591void ParallelMoveResolverARM64::EmitSwap(size_t index) {
592 MoveOperands* move = moves_.Get(index);
593 codegen_->SwapLocations(move->GetDestination(), move->GetSource());
594}
595
596void ParallelMoveResolverARM64::RestoreScratch(int reg) {
597 __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
598}
599
600void ParallelMoveResolverARM64::SpillScratch(int reg) {
601 __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
602}
603
Alexandre Rames5319def2014-10-23 10:03:10 +0100604void CodeGeneratorARM64::GenerateFrameEntry() {
Serban Constantinescu02164b32014-11-13 14:05:07 +0000605 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
606 if (do_overflow_check) {
607 UseScratchRegisterScope temps(GetVIXLAssembler());
608 Register temp = temps.AcquireX();
609 if (kExplicitStackOverflowCheck) {
610 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
611 AddSlowPath(slow_path);
612
613 __ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
614 __ Cmp(sp, temp);
615 __ B(lo, slow_path->GetEntryLabel());
616 } else {
617 __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
618 __ Ldr(wzr, MemOperand(temp, 0));
619 RecordPcInfo(nullptr, 0);
620 }
621 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100622
623 CPURegList preserved_regs = GetFramePreservedRegisters();
624 int frame_size = GetFrameSize();
625 core_spill_mask_ |= preserved_regs.list();
626
627 __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
628 __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
629
630 // Stack layout:
631 // sp[frame_size - 8] : lr.
632 // ... : other preserved registers.
633 // sp[frame_size - regs_size]: first preserved register.
634 // ... : reserved frame space.
Alexandre Rames67555f72014-11-18 10:55:16 +0000635 // sp[0] : current method.
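  // Note: under the quick calling convention the incoming ArtMethod* is in w0,
  // so the pre-indexed store above both allocates the frame and leaves the
  // current method at sp[0], matching kCurrentMethodStackOffset and
  // LoadCurrentMethod.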
Alexandre Rames5319def2014-10-23 10:03:10 +0100636}
637
638void CodeGeneratorARM64::GenerateFrameExit() {
639 int frame_size = GetFrameSize();
640 CPURegList preserved_regs = GetFramePreservedRegisters();
641 __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
642 __ Drop(frame_size);
643}
644
645void CodeGeneratorARM64::Bind(HBasicBlock* block) {
646 __ Bind(GetLabelOf(block));
647}
648
Alexandre Rames5319def2014-10-23 10:03:10 +0100649void CodeGeneratorARM64::Move(HInstruction* instruction,
650 Location location,
651 HInstruction* move_for) {
652 LocationSummary* locations = instruction->GetLocations();
653 if (locations != nullptr && locations->Out().Equals(location)) {
654 return;
655 }
656
657 Primitive::Type type = instruction->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000658 DCHECK_NE(type, Primitive::kPrimVoid);
Alexandre Rames5319def2014-10-23 10:03:10 +0100659
660 if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
661 int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
662 : instruction->AsLongConstant()->GetValue();
663 if (location.IsRegister()) {
664 Register dst = RegisterFrom(location, type);
665 DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
666 (instruction->IsLongConstant() && dst.Is64Bits()));
667 __ Mov(dst, value);
668 } else {
669 DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000670 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100671 Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
672 __ Mov(temp, value);
673 __ Str(temp, StackOperandFrom(location));
674 }
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +0000675 } else if (instruction->IsTemporary()) {
676 Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000677 MoveLocation(location, temp_location, type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100678 } else if (instruction->IsLoadLocal()) {
679 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000680 if (Is64BitType(type)) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000681 MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000682 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000683 MoveLocation(location, Location::StackSlot(stack_slot), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100684 }
685
686 } else {
687 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000688 MoveLocation(location, locations->Out(), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100689 }
690}
691
692size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
693 return GetFramePreservedRegistersSize();
694}
695
696Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
697 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000698
Alexandre Rames5319def2014-10-23 10:03:10 +0100699 switch (type) {
700 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000701 case Primitive::kPrimInt:
702 case Primitive::kPrimFloat:
703 return Location::StackSlot(GetStackSlot(load->GetLocal()));
704
705 case Primitive::kPrimLong:
706 case Primitive::kPrimDouble:
707 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
708
Alexandre Rames5319def2014-10-23 10:03:10 +0100709 case Primitive::kPrimBoolean:
710 case Primitive::kPrimByte:
711 case Primitive::kPrimChar:
712 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +0100713 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +0100714 LOG(FATAL) << "Unexpected type " << type;
715 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000716
Alexandre Rames5319def2014-10-23 10:03:10 +0100717 LOG(FATAL) << "Unreachable";
718 return Location::NoLocation();
719}
720
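// Marks the GC card covering `object` when `value` (the reference being stored
// into it) is non-null: the card address is card_table_base + (object >>
// kCardShift), and the byte stored is the low byte of the card table base
// itself, which the runtime arranges to equal the dirty-card value.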
721void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
Alexandre Rames67555f72014-11-18 10:55:16 +0000722 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100723 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000724 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +0100725 vixl::Label done;
726 __ Cbz(value, &done);
727 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
728 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000729 __ Strb(card, MemOperand(card, temp.X()));
Alexandre Rames5319def2014-10-23 10:03:10 +0100730 __ Bind(&done);
731}
732
733void CodeGeneratorARM64::SetupBlockedRegisters() const {
734 // Block reserved registers:
735 // ip0 (VIXL temporary)
736 // ip1 (VIXL temporary)
Serban Constantinescu02164b32014-11-13 14:05:07 +0000737 // tr
Alexandre Rames5319def2014-10-23 10:03:10 +0100738 // lr
739 // sp is not part of the allocatable registers, so we don't need to block it.
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +0000740 // TODO: Avoid blocking callee-saved registers, and instead preserve them
741 // where necessary.
Alexandre Rames5319def2014-10-23 10:03:10 +0100742 CPURegList reserved_core_registers = vixl_reserved_core_registers;
743 reserved_core_registers.Combine(runtime_reserved_core_registers);
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +0000744 reserved_core_registers.Combine(quick_callee_saved_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +0100745 while (!reserved_core_registers.IsEmpty()) {
746 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
747 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000748 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
749 reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
 750 while (!reserved_fp_registers.IsEmpty()) {
751 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
752 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100753}
754
755Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
756 if (type == Primitive::kPrimVoid) {
757 LOG(FATAL) << "Unreachable type " << type;
758 }
759
Alexandre Rames5319def2014-10-23 10:03:10 +0100760 if (IsFPType(type)) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000761 ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
762 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100763 return Location::FpuRegisterLocation(reg);
764 } else {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000765 ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
766 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100767 return Location::RegisterLocation(reg);
768 }
769}
770
Alexandre Rames3e69f162014-12-10 10:36:50 +0000771size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
772 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
773 __ Str(reg, MemOperand(sp, stack_index));
774 return kArm64WordSize;
775}
776
777size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
778 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
779 __ Ldr(reg, MemOperand(sp, stack_index));
780 return kArm64WordSize;
781}
782
783size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
784 FPRegister reg = FPRegister(reg_id, kDRegSize);
785 __ Str(reg, MemOperand(sp, stack_index));
786 return kArm64WordSize;
787}
788
789size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
790 FPRegister reg = FPRegister(reg_id, kDRegSize);
791 __ Ldr(reg, MemOperand(sp, stack_index));
792 return kArm64WordSize;
793}
794
Alexandre Rames5319def2014-10-23 10:03:10 +0100795void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
796 stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
797}
798
799void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
800 stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
801}
802
Alexandre Rames67555f72014-11-18 10:55:16 +0000803void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
804 if (constant->IsIntConstant() || constant->IsLongConstant()) {
805 __ Mov(Register(destination),
806 constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
807 : constant->AsLongConstant()->GetValue());
808 } else if (constant->IsFloatConstant()) {
809 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
810 } else {
811 DCHECK(constant->IsDoubleConstant());
812 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
813 }
814}
815
Alexandre Rames3e69f162014-12-10 10:36:50 +0000816
817static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
818 DCHECK(constant.IsConstant());
819 HConstant* cst = constant.GetConstant();
820 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
821 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
822 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
823 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
824}
825
826void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000827 if (source.Equals(destination)) {
828 return;
829 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000830
831 // A valid move can always be inferred from the destination and source
832 // locations. When moving from and to a register, the argument type can be
833 // used to generate 32bit instead of 64bit moves. In debug mode we also
 834 // check the coherency of the locations and the type.
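  // For instance, with an unspecified (kPrimVoid) type, a move from a 32bit
  // stack slot into a core register is emitted as a 32bit (W) load, while a
  // register-to-register move conservatively uses the 64bit form.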
835 bool unspecified_type = (type == Primitive::kPrimVoid);
836
837 if (destination.IsRegister() || destination.IsFpuRegister()) {
838 if (unspecified_type) {
839 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
840 if (source.IsStackSlot() ||
841 (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
842 // For stack slots and 32bit constants, a 64bit type is appropriate.
843 type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +0000844 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000845 // If the source is a double stack slot or a 64bit constant, a 64bit
 846 // type is appropriate. Otherwise the source is a register, and since the
 847 // type has not been specified, we choose a 64bit type to force a 64bit
848 // move.
849 type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +0000850 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000851 }
852 DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
853 (destination.IsRegister() && !IsFPType(type)));
854 CPURegister dst = CPURegisterFrom(destination, type);
855 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
856 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
857 __ Ldr(dst, StackOperandFrom(source));
858 } else if (source.IsConstant()) {
859 DCHECK(CoherentConstantAndType(source, type));
860 MoveConstant(dst, source.GetConstant());
861 } else {
862 if (destination.IsRegister()) {
863 __ Mov(Register(dst), RegisterFrom(source, type));
864 } else {
865 __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
866 }
867 }
868
869 } else { // The destination is not a register. It must be a stack slot.
870 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
871 if (source.IsRegister() || source.IsFpuRegister()) {
872 if (unspecified_type) {
873 if (source.IsRegister()) {
874 type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
875 } else {
876 type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
877 }
878 }
879 DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
880 (source.IsFpuRegister() == IsFPType(type)));
881 __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
882 } else if (source.IsConstant()) {
883 DCHECK(unspecified_type || CoherentConstantAndType(source, type));
884 UseScratchRegisterScope temps(GetVIXLAssembler());
885 HConstant* src_cst = source.GetConstant();
886 CPURegister temp;
887 if (src_cst->IsIntConstant()) {
888 temp = temps.AcquireW();
889 } else if (src_cst->IsLongConstant()) {
890 temp = temps.AcquireX();
891 } else if (src_cst->IsFloatConstant()) {
892 temp = temps.AcquireS();
893 } else {
894 DCHECK(src_cst->IsDoubleConstant());
895 temp = temps.AcquireD();
896 }
897 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +0000898 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000899 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +0000900 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000901 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000902 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000903 // There is generally less pressure on FP registers.
904 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000905 __ Ldr(temp, StackOperandFrom(source));
906 __ Str(temp, StackOperandFrom(destination));
907 }
908 }
909}
910
Alexandre Rames3e69f162014-12-10 10:36:50 +0000911void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
912 DCHECK(!loc1.IsConstant());
913 DCHECK(!loc2.IsConstant());
914
915 if (loc1.Equals(loc2)) {
916 return;
917 }
918
919 UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);
920
921 bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
922 bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
923 bool is_fp_reg1 = loc1.IsFpuRegister();
924 bool is_fp_reg2 = loc2.IsFpuRegister();
925
926 if (loc2.IsRegister() && loc1.IsRegister()) {
927 Register r1 = XRegisterFrom(loc1);
928 Register r2 = XRegisterFrom(loc2);
929 Register tmp = temps.AcquireSameSizeAs(r1);
930 __ Mov(tmp, r2);
931 __ Mov(r2, r1);
932 __ Mov(r1, tmp);
933 } else if (is_fp_reg2 && is_fp_reg1) {
934 FPRegister r1 = DRegisterFrom(loc1);
935 FPRegister r2 = DRegisterFrom(loc2);
936 FPRegister tmp = temps.AcquireSameSizeAs(r1);
937 __ Fmov(tmp, r2);
938 __ Fmov(r2, r1);
939 __ Fmov(r1, tmp);
940 } else if (is_slot1 != is_slot2) {
941 MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
942 Location reg_loc = is_slot1 ? loc2 : loc1;
943 CPURegister reg, tmp;
944 if (reg_loc.IsFpuRegister()) {
945 reg = DRegisterFrom(reg_loc);
946 tmp = temps.AcquireD();
947 } else {
948 reg = XRegisterFrom(reg_loc);
949 tmp = temps.AcquireX();
950 }
951 __ Ldr(tmp, mem);
952 __ Str(reg, mem);
953 if (reg_loc.IsFpuRegister()) {
954 __ Fmov(FPRegister(reg), FPRegister(tmp));
955 } else {
956 __ Mov(Register(reg), Register(tmp));
957 }
958 } else if (is_slot1 && is_slot2) {
959 MemOperand mem1 = StackOperandFrom(loc1);
960 MemOperand mem2 = StackOperandFrom(loc2);
961 Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
962 Register tmp2 = temps.AcquireSameSizeAs(tmp1);
963 __ Ldr(tmp1, mem1);
964 __ Ldr(tmp2, mem2);
965 __ Str(tmp1, mem2);
966 __ Str(tmp2, mem1);
967 } else {
968 LOG(FATAL) << "Unimplemented";
969 }
970}
971
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000972void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000973 CPURegister dst,
974 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000975 switch (type) {
976 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +0000977 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000978 break;
979 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +0000980 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000981 break;
982 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +0000983 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000984 break;
985 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +0000986 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000987 break;
988 case Primitive::kPrimInt:
989 case Primitive::kPrimNot:
990 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000991 case Primitive::kPrimFloat:
992 case Primitive::kPrimDouble:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000993 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +0000994 __ Ldr(dst, src);
995 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000996 case Primitive::kPrimVoid:
997 LOG(FATAL) << "Unreachable type " << type;
998 }
999}
1000
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001001void CodeGeneratorARM64::LoadAcquire(Primitive::Type type,
1002 CPURegister dst,
1003 const MemOperand& src) {
1004 UseScratchRegisterScope temps(GetVIXLAssembler());
1005 Register temp_base = temps.AcquireX();
1006
1007 DCHECK(!src.IsRegisterOffset());
1008 DCHECK(!src.IsPreIndex());
1009 DCHECK(!src.IsPostIndex());
1010
1011 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1012 __ Add(temp_base, src.base(), src.offset());
1013 MemOperand base = MemOperand(temp_base);
1014 switch (type) {
1015 case Primitive::kPrimBoolean:
1016 __ Ldarb(Register(dst), base);
1017 break;
1018 case Primitive::kPrimByte:
1019 __ Ldarb(Register(dst), base);
1020 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1021 break;
1022 case Primitive::kPrimChar:
1023 __ Ldarh(Register(dst), base);
1024 break;
1025 case Primitive::kPrimShort:
1026 __ Ldarh(Register(dst), base);
1027 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1028 break;
1029 case Primitive::kPrimInt:
1030 case Primitive::kPrimNot:
1031 case Primitive::kPrimLong:
1032 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
1033 __ Ldar(Register(dst), base);
1034 break;
1035 case Primitive::kPrimFloat:
1036 case Primitive::kPrimDouble: {
1037 DCHECK(dst.IsFPRegister());
1038 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
1039
1040 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1041 __ Ldar(temp, base);
1042 __ Fmov(FPRegister(dst), temp);
1043 break;
1044 }
1045 case Primitive::kPrimVoid:
1046 LOG(FATAL) << "Unreachable type " << type;
1047 }
1048}
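// Note on the sequences above: ldar has no sign-extending or FP-destination
// form, so byte/short loads sign-extend the result with sbfx afterwards, and
// float/double loads go through an integer ldar followed by an fmov.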
1049
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001050void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001051 CPURegister src,
1052 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001053 switch (type) {
1054 case Primitive::kPrimBoolean:
1055 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001056 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001057 break;
1058 case Primitive::kPrimChar:
1059 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001060 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001061 break;
1062 case Primitive::kPrimInt:
1063 case Primitive::kPrimNot:
1064 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001065 case Primitive::kPrimFloat:
1066 case Primitive::kPrimDouble:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001067 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1068 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001069 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001070 case Primitive::kPrimVoid:
1071 LOG(FATAL) << "Unreachable type " << type;
1072 }
1073}
1074
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001075void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1076 CPURegister src,
1077 const MemOperand& dst) {
1078 UseScratchRegisterScope temps(GetVIXLAssembler());
1079 Register temp_base = temps.AcquireX();
1080
1081 DCHECK(!dst.IsRegisterOffset());
1082 DCHECK(!dst.IsPreIndex());
1083 DCHECK(!dst.IsPostIndex());
1084
1085 // TODO(vixl): Let the MacroAssembler handle this.
1086 __ Add(temp_base, dst.base(), dst.offset());
1087 MemOperand base = MemOperand(temp_base);
1088 switch (type) {
1089 case Primitive::kPrimBoolean:
1090 case Primitive::kPrimByte:
1091 __ Stlrb(Register(src), base);
1092 break;
1093 case Primitive::kPrimChar:
1094 case Primitive::kPrimShort:
1095 __ Stlrh(Register(src), base);
1096 break;
1097 case Primitive::kPrimInt:
1098 case Primitive::kPrimNot:
1099 case Primitive::kPrimLong:
1100 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1101 __ Stlr(Register(src), base);
1102 break;
1103 case Primitive::kPrimFloat:
1104 case Primitive::kPrimDouble: {
1105 DCHECK(src.IsFPRegister());
1106 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1107
1108 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1109 __ Fmov(temp, FPRegister(src));
1110 __ Stlr(temp, base);
1111 break;
1112 }
1113 case Primitive::kPrimVoid:
1114 LOG(FATAL) << "Unreachable type " << type;
1115 }
1116}
1117
Alexandre Rames67555f72014-11-18 10:55:16 +00001118void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
1119 DCHECK(current_method.IsW());
1120 __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
1121}
1122
1123void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1124 HInstruction* instruction,
1125 uint32_t dex_pc) {
1126 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1127 __ Blr(lr);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001128 if (instruction != nullptr) {
1129 RecordPcInfo(instruction, dex_pc);
1130 DCHECK(instruction->IsSuspendCheck()
1131 || instruction->IsBoundsCheck()
1132 || instruction->IsNullCheck()
1133 || instruction->IsDivZeroCheck()
1134 || !IsLeafMethod());
1135 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001136}
1137
1138void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1139 vixl::Register class_reg) {
1140 UseScratchRegisterScope temps(GetVIXLAssembler());
1141 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001142 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1143
Serban Constantinescu02164b32014-11-13 14:05:07 +00001144 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001145 if (kUseAcquireRelease) {
1146 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1147 __ Add(temp, class_reg, status_offset);
1148 __ Ldar(temp, HeapOperand(temp));
1149 __ Cmp(temp, mirror::Class::kStatusInitialized);
1150 __ B(lt, slow_path->GetEntryLabel());
1151 } else {
1152 __ Ldr(temp, HeapOperand(class_reg, status_offset));
1153 __ Cmp(temp, mirror::Class::kStatusInitialized);
1154 __ B(lt, slow_path->GetEntryLabel());
1155 __ Dmb(InnerShareable, BarrierReads);
1156 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001157 __ Bind(slow_path->GetExitLabel());
1158}
Alexandre Rames5319def2014-10-23 10:03:10 +01001159
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001160void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
1161 BarrierType type = BarrierAll;
1162
1163 switch (kind) {
1164 case MemBarrierKind::kAnyAny:
1165 case MemBarrierKind::kAnyStore: {
1166 type = BarrierAll;
1167 break;
1168 }
1169 case MemBarrierKind::kLoadAny: {
1170 type = BarrierReads;
1171 break;
1172 }
1173 case MemBarrierKind::kStoreStore: {
1174 type = BarrierWrites;
1175 break;
1176 }
1177 default:
1178 LOG(FATAL) << "Unexpected memory barrier " << kind;
1179 }
1180 __ Dmb(InnerShareable, type);
1181}
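// With VIXL, the Dmb above assembles to "dmb ish" for BarrierAll, "dmb ishld"
// for BarrierReads and "dmb ishst" for BarrierWrites, all in the
// inner-shareable domain.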
1182
Serban Constantinescu02164b32014-11-13 14:05:07 +00001183void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1184 HBasicBlock* successor) {
1185 SuspendCheckSlowPathARM64* slow_path =
1186 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1187 codegen_->AddSlowPath(slow_path);
1188 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1189 Register temp = temps.AcquireW();
1190
1191 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1192 if (successor == nullptr) {
1193 __ Cbnz(temp, slow_path->GetEntryLabel());
1194 __ Bind(slow_path->GetReturnLabel());
1195 } else {
1196 __ Cbz(temp, codegen_->GetLabelOf(successor));
1197 __ B(slow_path->GetEntryLabel());
1198 // slow_path will return to GetLabelOf(successor).
1199 }
1200}
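// The half-word loaded from Thread::ThreadFlagsOffset is non-zero whenever a
// suspend or checkpoint request is pending, so the cbnz/cbz above only routes
// execution to the slow path (and pTestSuspend) in that case.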
1201
Alexandre Rames5319def2014-10-23 10:03:10 +01001202InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1203 CodeGeneratorARM64* codegen)
1204 : HGraphVisitor(graph),
1205 assembler_(codegen->GetAssembler()),
1206 codegen_(codegen) {}
1207
1208#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001209 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001210
1211#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1212
1213enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001214 // Using a base helps identify when we hit such breakpoints.
1215 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001216#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1217 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1218#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1219};
1220
1221#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
1222 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001223 UNUSED(instr); \
Alexandre Rames5319def2014-10-23 10:03:10 +01001224 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1225 } \
1226 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1227 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1228 locations->SetOut(Location::Any()); \
1229 }
1230 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1231#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1232
1233#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001234#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001235
Alexandre Rames67555f72014-11-18 10:55:16 +00001236void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001237 DCHECK_EQ(instr->InputCount(), 2U);
1238 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1239 Primitive::Type type = instr->GetResultType();
1240 switch (type) {
1241 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001242 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001243 locations->SetInAt(0, Location::RequiresRegister());
1244 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001245 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001246 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001247
1248 case Primitive::kPrimFloat:
1249 case Primitive::kPrimDouble:
1250 locations->SetInAt(0, Location::RequiresFpuRegister());
1251 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001252 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001253 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001254
Alexandre Rames5319def2014-10-23 10:03:10 +01001255 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001256 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001257 }
1258}
1259
Alexandre Rames67555f72014-11-18 10:55:16 +00001260void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001261 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001262
1263 switch (type) {
1264 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001265 case Primitive::kPrimLong: {
1266 Register dst = OutputRegister(instr);
1267 Register lhs = InputRegisterAt(instr, 0);
1268 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001269 if (instr->IsAdd()) {
1270 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001271 } else if (instr->IsAnd()) {
1272 __ And(dst, lhs, rhs);
1273 } else if (instr->IsOr()) {
1274 __ Orr(dst, lhs, rhs);
1275 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001276 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001277 } else {
1278 DCHECK(instr->IsXor());
1279 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001280 }
1281 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001282 }
1283 case Primitive::kPrimFloat:
1284 case Primitive::kPrimDouble: {
1285 FPRegister dst = OutputFPRegister(instr);
1286 FPRegister lhs = InputFPRegisterAt(instr, 0);
1287 FPRegister rhs = InputFPRegisterAt(instr, 1);
1288 if (instr->IsAdd()) {
1289 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001290 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001291 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001292 } else {
1293 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001294 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001295 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001296 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001297 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001298 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001299 }
1300}
1301
Serban Constantinescu02164b32014-11-13 14:05:07 +00001302void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1303 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1304
1305 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1306 Primitive::Type type = instr->GetResultType();
1307 switch (type) {
1308 case Primitive::kPrimInt:
1309 case Primitive::kPrimLong: {
1310 locations->SetInAt(0, Location::RequiresRegister());
1311 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1312 locations->SetOut(Location::RequiresRegister());
1313 break;
1314 }
1315 default:
1316 LOG(FATAL) << "Unexpected shift type " << type;
1317 }
1318}
1319
1320void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1321 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1322
1323 Primitive::Type type = instr->GetType();
1324 switch (type) {
1325 case Primitive::kPrimInt:
1326 case Primitive::kPrimLong: {
1327 Register dst = OutputRegister(instr);
1328 Register lhs = InputRegisterAt(instr, 0);
1329 Operand rhs = InputOperandAt(instr, 1);
1330 if (rhs.IsImmediate()) {
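          // Java only uses the low bits of the shift distance, so the immediate
          // is masked with kMaxIntShiftValue (int) or kMaxLongShiftValue (long).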
1331 uint32_t shift_value = (type == Primitive::kPrimInt)
1332 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1333 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1334 if (instr->IsShl()) {
1335 __ Lsl(dst, lhs, shift_value);
1336 } else if (instr->IsShr()) {
1337 __ Asr(dst, lhs, shift_value);
1338 } else {
1339 __ Lsr(dst, lhs, shift_value);
1340 }
1341 } else {
1342 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1343
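          // For register-specified shifts the Lsl/Asr/Lsr instructions already
          // use only the low bits of rhs_reg, so no explicit masking is needed.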
1344 if (instr->IsShl()) {
1345 __ Lsl(dst, lhs, rhs_reg);
1346 } else if (instr->IsShr()) {
1347 __ Asr(dst, lhs, rhs_reg);
1348 } else {
1349 __ Lsr(dst, lhs, rhs_reg);
1350 }
1351 }
1352 break;
1353 }
1354 default:
1355 LOG(FATAL) << "Unexpected shift operation type " << type;
1356 }
1357}
1358
Alexandre Rames5319def2014-10-23 10:03:10 +01001359void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001360 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001361}
1362
1363void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001364 HandleBinaryOp(instruction);
1365}
1366
1367void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1368 HandleBinaryOp(instruction);
1369}
1370
1371void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1372 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001373}
1374
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001375void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1376 LocationSummary* locations =
1377 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1378 locations->SetInAt(0, Location::RequiresRegister());
1379 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1380 locations->SetOut(Location::RequiresRegister());
1381}
1382
1383void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1384 LocationSummary* locations = instruction->GetLocations();
1385 Primitive::Type type = instruction->GetType();
1386 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001387 Location index = locations->InAt(1);
1388 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001389 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001390 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001391
1392 if (index.IsConstant()) {
1393 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001394 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001395 } else {
1396 Register temp = temps.AcquireSameSizeAs(obj);
1397 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1398 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001399 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001400 }
1401
Alexandre Rames67555f72014-11-18 10:55:16 +00001402 codegen_->Load(type, OutputCPURegister(instruction), source);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001403}
1404
Alexandre Rames5319def2014-10-23 10:03:10 +01001405void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1406 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1407 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001408 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001409}
1410
1411void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1412 __ Ldr(OutputRegister(instruction),
1413 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
1414}
1415
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001416void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1417 Primitive::Type value_type = instruction->GetComponentType();
1418 bool is_object = value_type == Primitive::kPrimNot;
1419 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1420 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1421 if (is_object) {
1422 InvokeRuntimeCallingConvention calling_convention;
1423 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1424 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1425 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1426 } else {
1427 locations->SetInAt(0, Location::RequiresRegister());
1428 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1429 locations->SetInAt(2, Location::RequiresRegister());
1430 }
1431}
1432
1433void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1434 Primitive::Type value_type = instruction->GetComponentType();
1435 if (value_type == Primitive::kPrimNot) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001436 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001437 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001438 } else {
1439 LocationSummary* locations = instruction->GetLocations();
1440 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001441 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001442 Location index = locations->InAt(1);
1443 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001444 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001445 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001446
1447 if (index.IsConstant()) {
1448 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001449 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001450 } else {
1451 Register temp = temps.AcquireSameSizeAs(obj);
1452 Register index_reg = InputRegisterAt(instruction, 1);
1453 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001454 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001455 }
1456
1457 codegen_->Store(value_type, value, destination);
1458 }
1459}
1460
Alexandre Rames67555f72014-11-18 10:55:16 +00001461void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1462 LocationSummary* locations =
1463 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1464 locations->SetInAt(0, Location::RequiresRegister());
1465 locations->SetInAt(1, Location::RequiresRegister());
1466 if (instruction->HasUses()) {
1467 locations->SetOut(Location::SameAsFirstInput());
1468 }
1469}
1470
1471void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001472 LocationSummary* locations = instruction->GetLocations();
1473 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1474 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001475 codegen_->AddSlowPath(slow_path);
1476
1477 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1478 __ B(slow_path->GetEntryLabel(), hs);
1479}
1480
1481void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1482 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1483 instruction, LocationSummary::kCallOnSlowPath);
1484 locations->SetInAt(0, Location::RequiresRegister());
1485 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001486 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001487}
1488
1489void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001490 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001491 Register obj = InputRegisterAt(instruction, 0);
1492 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001493 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001494
Alexandre Rames3e69f162014-12-10 10:36:50 +00001495 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1496 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001497 codegen_->AddSlowPath(slow_path);
1498
1499 // TODO: avoid this check if we know obj is not null.
1500 __ Cbz(obj, slow_path->GetExitLabel());
1501 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001502 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1503 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001504 __ B(ne, slow_path->GetEntryLabel());
1505 __ Bind(slow_path->GetExitLabel());
1506}
1507
1508void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1509 LocationSummary* locations =
1510 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1511 locations->SetInAt(0, Location::RequiresRegister());
1512 if (check->HasUses()) {
1513 locations->SetOut(Location::SameAsFirstInput());
1514 }
1515}
1516
1517void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1518 // We assume the class is not null.
1519 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1520 check->GetLoadClass(), check, check->GetDexPc(), true);
1521 codegen_->AddSlowPath(slow_path);
1522 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1523}
1524
Serban Constantinescu02164b32014-11-13 14:05:07 +00001525void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001526 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001527 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1528 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001529 switch (in_type) {
1530 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001531 locations->SetInAt(0, Location::RequiresRegister());
1532 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1533 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1534 break;
1535 }
1536 case Primitive::kPrimFloat:
1537 case Primitive::kPrimDouble: {
1538 locations->SetInAt(0, Location::RequiresFpuRegister());
1539 locations->SetInAt(1, Location::RequiresFpuRegister());
1540 locations->SetOut(Location::RequiresRegister());
1541 break;
1542 }
1543 default:
1544 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1545 }
1546}
1547
1548void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1549 Primitive::Type in_type = compare->InputAt(0)->GetType();
1550
1551 // 0 if: left == right
1552 // 1 if: left > right
1553 // -1 if: left < right
1554 switch (in_type) {
1555 case Primitive::kPrimLong: {
1556 Register result = OutputRegister(compare);
1557 Register left = InputRegisterAt(compare, 0);
1558 Operand right = InputOperandAt(compare, 1);
1559
1560 __ Cmp(left, right);
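      // Cset writes 1 when the operands differ (ne) and 0 otherwise; Cneg then
      // negates that value when left < right, producing the -1/0/1 convention
      // described above.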
1561 __ Cset(result, ne);
1562 __ Cneg(result, result, lt);
1563 break;
1564 }
1565 case Primitive::kPrimFloat:
1566 case Primitive::kPrimDouble: {
1567 Register result = OutputRegister(compare);
1568 FPRegister left = InputFPRegisterAt(compare, 0);
1569 FPRegister right = InputFPRegisterAt(compare, 1);
1570
1571 __ Fcmp(left, right);
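      // If either input is NaN, Fcmp reports "unordered" (C and V set), which
      // makes `ne` true while `mi` and `gt` stay false: the gt-bias path below
      // therefore yields +1 for NaN inputs and the lt-bias path yields -1.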
1572 if (compare->IsGtBias()) {
1573 __ Cset(result, ne);
1574 } else {
1575 __ Csetm(result, ne);
1576 }
1577 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001578 break;
1579 }
1580 default:
1581 LOG(FATAL) << "Unimplemented compare type " << in_type;
1582 }
1583}
1584
1585void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1586 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1587 locations->SetInAt(0, Location::RequiresRegister());
1588 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1589 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001590 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001591 }
1592}
1593
1594void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1595 if (!instruction->NeedsMaterialization()) {
1596 return;
1597 }
1598
1599 LocationSummary* locations = instruction->GetLocations();
1600 Register lhs = InputRegisterAt(instruction, 0);
1601 Operand rhs = InputOperandAt(instruction, 1);
1602 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1603 Condition cond = ARM64Condition(instruction->GetCondition());
1604
1605 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001606 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001607}
1608
1609#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1610 M(Equal) \
1611 M(NotEqual) \
1612 M(LessThan) \
1613 M(LessThanOrEqual) \
1614 M(GreaterThan) \
1615 M(GreaterThanOrEqual)
1616#define DEFINE_CONDITION_VISITORS(Name) \
1617void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1618void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1619FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001620#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001621#undef FOR_EACH_CONDITION_INSTRUCTION
1622
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001623void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1624 LocationSummary* locations =
1625 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1626 switch (div->GetResultType()) {
1627 case Primitive::kPrimInt:
1628 case Primitive::kPrimLong:
1629 locations->SetInAt(0, Location::RequiresRegister());
1630 locations->SetInAt(1, Location::RequiresRegister());
1631 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1632 break;
1633
1634 case Primitive::kPrimFloat:
1635 case Primitive::kPrimDouble:
1636 locations->SetInAt(0, Location::RequiresFpuRegister());
1637 locations->SetInAt(1, Location::RequiresFpuRegister());
1638 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1639 break;
1640
1641 default:
1642 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1643 }
1644}
1645
1646void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1647 Primitive::Type type = div->GetResultType();
1648 switch (type) {
1649 case Primitive::kPrimInt:
1650 case Primitive::kPrimLong:
1651 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1652 break;
1653
1654 case Primitive::kPrimFloat:
1655 case Primitive::kPrimDouble:
1656 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1657 break;
1658
1659 default:
1660 LOG(FATAL) << "Unexpected div type " << type;
1661 }
1662}
1663
Alexandre Rames67555f72014-11-18 10:55:16 +00001664void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1665 LocationSummary* locations =
1666 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1667 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1668 if (instruction->HasUses()) {
1669 locations->SetOut(Location::SameAsFirstInput());
1670 }
1671}
1672
1673void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1674 SlowPathCodeARM64* slow_path =
1675 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1676 codegen_->AddSlowPath(slow_path);
1677 Location value = instruction->GetLocations()->InAt(0);
1678
Alexandre Rames3e69f162014-12-10 10:36:50 +00001679 Primitive::Type type = instruction->GetType();
1680
1681 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1682 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1683 return;
1684 }
1685
Alexandre Rames67555f72014-11-18 10:55:16 +00001686 if (value.IsConstant()) {
1687 int64_t divisor = Int64ConstantFrom(value);
1688 if (divisor == 0) {
1689 __ B(slow_path->GetEntryLabel());
1690 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001691 // A division by a non-zero constant is valid. We don't need to perform
1692 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001693 }
1694 } else {
1695 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1696 }
1697}
1698
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001699void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1700 LocationSummary* locations =
1701 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1702 locations->SetOut(Location::ConstantLocation(constant));
1703}
1704
1705void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1706 UNUSED(constant);
1707 // Will be generated at use site.
1708}
1709
Alexandre Rames5319def2014-10-23 10:03:10 +01001710void LocationsBuilderARM64::VisitExit(HExit* exit) {
1711 exit->SetLocations(nullptr);
1712}
1713
1714void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001715 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001716 if (kIsDebugBuild) {
1717 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001718 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001719 }
1720}
1721
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001722void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1723 LocationSummary* locations =
1724 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1725 locations->SetOut(Location::ConstantLocation(constant));
1726}
1727
1728void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1729 UNUSED(constant);
1730 // Will be generated at use site.
1731}
1732
Alexandre Rames5319def2014-10-23 10:03:10 +01001733void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1734 got->SetLocations(nullptr);
1735}
1736
1737void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1738 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001739 DCHECK(!successor->IsExitBlock());
1740 HBasicBlock* block = got->GetBlock();
1741 HInstruction* previous = got->GetPrevious();
1742 HLoopInformation* info = block->GetLoopInformation();
1743
1744 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1745 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1746 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1747 return;
1748 }
1749 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1750 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1751 }
1752 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001753 __ B(codegen_->GetLabelOf(successor));
1754 }
1755}
1756
1757void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1758 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1759 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001760 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001761 locations->SetInAt(0, Location::RequiresRegister());
1762 }
1763}
1764
1765void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1766 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001767 HCondition* condition = cond->AsCondition();
1768 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1769 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1770
Serban Constantinescu02164b32014-11-13 14:05:07 +00001771 if (cond->IsIntConstant()) {
1772 int32_t cond_value = cond->AsIntConstant()->GetValue();
1773 if (cond_value == 1) {
1774 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1775 __ B(true_target);
1776 }
1777 return;
1778 } else {
1779 DCHECK_EQ(cond_value, 0);
1780 }
1781 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001782 // The condition instruction has been materialized; compare the output to 0.
1783 Location cond_val = if_instr->GetLocations()->InAt(0);
1784 DCHECK(cond_val.IsRegister());
1785 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001786 } else {
1787 // The condition instruction has not been materialized; use its inputs as
1788 // the comparison and its condition as the branch condition.
1789 Register lhs = InputRegisterAt(condition, 0);
1790 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001791 Condition arm64_cond = ARM64Condition(condition->GetCondition());
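      // Equality comparisons against zero can use the compare-and-branch
      // instructions (Cbz/Cbnz) directly instead of a Cmp followed by B.cond.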
1792 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1793 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001794 __ Cbz(lhs, true_target);
1795 } else {
1796 __ Cbnz(lhs, true_target);
1797 }
1798 } else {
1799 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001800 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001801 }
1802 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001803 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1804 __ B(false_target);
1805 }
1806}
1807
1808void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001809 LocationSummary* locations =
1810 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001811 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001812 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001813}
1814
1815void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001816 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001817
1818 if (instruction->IsVolatile()) {
1819 if (kUseAcquireRelease) {
1820 codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
1821 } else {
1822 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1823 // For IRIW sequential consistency kLoadAny is not sufficient.
1824 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1825 }
1826 } else {
1827 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1828 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001829}
1830
1831void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001832 LocationSummary* locations =
1833 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001834 locations->SetInAt(0, Location::RequiresRegister());
1835 locations->SetInAt(1, Location::RequiresRegister());
1836}
1837
1838void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001839 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001840 CPURegister value = InputCPURegisterAt(instruction, 1);
1841 Offset offset = instruction->GetFieldOffset();
1842 Primitive::Type field_type = instruction->GetFieldType();
1843
1844 if (instruction->IsVolatile()) {
1845 if (kUseAcquireRelease) {
1846 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1847 } else {
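      // Explicit-barrier scheme: kAnyStore keeps earlier accesses from being
      // reordered past the volatile store, and kAnyAny afterwards orders the
      // store before any subsequent accesses.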
1848 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1849 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1850 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1851 }
1852 } else {
1853 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1854 }
1855
1856 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001857 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001858 }
1859}
1860
Alexandre Rames67555f72014-11-18 10:55:16 +00001861void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1862 LocationSummary::CallKind call_kind =
1863 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1864 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1865 locations->SetInAt(0, Location::RequiresRegister());
1866 locations->SetInAt(1, Location::RequiresRegister());
1867 locations->SetOut(Location::RequiresRegister(), true); // The output does overlap inputs.
1868}
1869
1870void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1871 LocationSummary* locations = instruction->GetLocations();
1872 Register obj = InputRegisterAt(instruction, 0);
1873 Register cls = InputRegisterAt(instruction, 1);
1874 Register out = OutputRegister(instruction);
1875
1876 vixl::Label done;
1877
1878 // Return 0 if `obj` is null.
1879 // TODO: Avoid this check if we know `obj` is not null.
1880 __ Mov(out, 0);
1881 __ Cbz(obj, &done);
1882
1883 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001884 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001885 __ Cmp(out, cls);
1886 if (instruction->IsClassFinal()) {
1887 // Classes must be equal for the instanceof to succeed.
1888 __ Cset(out, eq);
1889 } else {
1890 // If the classes are not equal, we go into a slow path.
1891 DCHECK(locations->OnlyCallsOnSlowPath());
1892 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001893 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1894 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001895 codegen_->AddSlowPath(slow_path);
1896 __ B(ne, slow_path->GetEntryLabel());
1897 __ Mov(out, 1);
1898 __ Bind(slow_path->GetExitLabel());
1899 }
1900
1901 __ Bind(&done);
1902}
1903
Alexandre Rames5319def2014-10-23 10:03:10 +01001904void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1905 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1906 locations->SetOut(Location::ConstantLocation(constant));
1907}
1908
1909void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1910 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001911 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001912}
1913
Alexandre Rames5319def2014-10-23 10:03:10 +01001914void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1915 LocationSummary* locations =
1916 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1917 locations->AddTemp(LocationFrom(x0));
1918
1919 InvokeDexCallingConventionVisitor calling_convention_visitor;
1920 for (size_t i = 0; i < invoke->InputCount(); i++) {
1921 HInstruction* input = invoke->InputAt(i);
1922 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1923 }
1924
1925 Primitive::Type return_type = invoke->GetType();
1926 if (return_type != Primitive::kPrimVoid) {
1927 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1928 }
1929}
1930
Alexandre Rames67555f72014-11-18 10:55:16 +00001931void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1932 HandleInvoke(invoke);
1933}
1934
1935void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1936 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1937 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1938 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1939 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
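  // The IMT has a fixed number of slots (kImtSize), so several interface
  // methods can map to the same slot; the method index passed in ip1 below
  // lets the conflict trampoline disambiguate them.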
1940 Location receiver = invoke->GetLocations()->InAt(0);
1941 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001942 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001943
1944 // The register ip1 is required to be used for the hidden argument in
1945 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1946 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1947 scratch_scope.Exclude(ip1);
1948 __ Mov(ip1, invoke->GetDexMethodIndex());
1949
1950 // temp = object->GetClass();
1951 if (receiver.IsStackSlot()) {
1952 __ Ldr(temp, StackOperandFrom(receiver));
1953 __ Ldr(temp, HeapOperand(temp, class_offset));
1954 } else {
1955 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1956 }
1957 // temp = temp->GetImtEntryAt(method_offset);
1958 __ Ldr(temp, HeapOperand(temp, method_offset));
1959 // lr = temp->GetEntryPoint();
1960 __ Ldr(lr, HeapOperand(temp, entry_point));
1961 // lr();
1962 __ Blr(lr);
1963 DCHECK(!codegen_->IsLeafMethod());
1964 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1965}
1966
1967void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1968 HandleInvoke(invoke);
1969}
1970
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001971void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001972 HandleInvoke(invoke);
1973}
1974
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001975void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001976 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1977 // Make sure that ArtMethod* is passed in W0 as per the calling convention.
1978 DCHECK(temp.Is(w0));
1979 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001980 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01001981
1982 // TODO: Implement all kinds of calls:
1983 // 1) boot -> boot
1984 // 2) app -> boot
1985 // 3) app -> app
1986 //
1987 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1988
1989 // temp = method;
Alexandre Rames67555f72014-11-18 10:55:16 +00001990 codegen_->LoadCurrentMethod(temp);
Nicolas Geoffray4e44c822014-12-17 12:25:12 +00001991 // temp = temp->dex_cache_resolved_methods_;
1992 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1993 // temp = temp[index_in_cache];
1994 __ Ldr(temp, HeapOperand(temp, index_in_cache));
Alexandre Rames5319def2014-10-23 10:03:10 +01001995 // lr = temp->entry_point_from_quick_compiled_code_;
Serban Constantinescu02164b32014-11-13 14:05:07 +00001996 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1997 kArm64WordSize)));
Alexandre Rames5319def2014-10-23 10:03:10 +01001998 // lr();
1999 __ Blr(lr);
2000
2001 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2002 DCHECK(!codegen_->IsLeafMethod());
2003}
2004
2005void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
2006 LocationSummary* locations = invoke->GetLocations();
2007 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002008 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002009 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2010 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2011 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002012 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002013
2014 // temp = object->GetClass();
2015 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002016 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2017 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002018 } else {
2019 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002020 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002021 }
2022 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002023 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002024 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002025 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002026 // lr();
2027 __ Blr(lr);
2028 DCHECK(!codegen_->IsLeafMethod());
2029 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2030}
2031
Alexandre Rames67555f72014-11-18 10:55:16 +00002032void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2033 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2034 : LocationSummary::kNoCall;
2035 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2036 locations->SetOut(Location::RequiresRegister());
2037}
2038
2039void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2040 Register out = OutputRegister(cls);
2041 if (cls->IsReferrersClass()) {
2042 DCHECK(!cls->CanCallRuntime());
2043 DCHECK(!cls->MustGenerateClinitCheck());
2044 codegen_->LoadCurrentMethod(out);
2045 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2046 } else {
2047 DCHECK(cls->CanCallRuntime());
2048 codegen_->LoadCurrentMethod(out);
2049 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002050 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002051
2052 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2053 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2054 codegen_->AddSlowPath(slow_path);
2055 __ Cbz(out, slow_path->GetEntryLabel());
2056 if (cls->MustGenerateClinitCheck()) {
2057 GenerateClassInitializationCheck(slow_path, out);
2058 } else {
2059 __ Bind(slow_path->GetExitLabel());
2060 }
2061 }
2062}
2063
2064void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2065 LocationSummary* locations =
2066 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2067 locations->SetOut(Location::RequiresRegister());
2068}
2069
2070void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2071 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2072 __ Ldr(OutputRegister(instruction), exception);
2073 __ Str(wzr, exception);
2074}
2075
Alexandre Rames5319def2014-10-23 10:03:10 +01002076void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2077 load->SetLocations(nullptr);
2078}
2079
2080void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2081 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002082 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002083}
2084
Alexandre Rames67555f72014-11-18 10:55:16 +00002085void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2086 LocationSummary* locations =
2087 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2088 locations->SetOut(Location::RequiresRegister());
2089}
2090
2091void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2092 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2093 codegen_->AddSlowPath(slow_path);
2094
2095 Register out = OutputRegister(load);
2096 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002097 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2098 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002099 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002100 __ Cbz(out, slow_path->GetEntryLabel());
2101 __ Bind(slow_path->GetExitLabel());
2102}
2103
Alexandre Rames5319def2014-10-23 10:03:10 +01002104void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2105 local->SetLocations(nullptr);
2106}
2107
2108void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2109 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2110}
2111
2112void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2113 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2114 locations->SetOut(Location::ConstantLocation(constant));
2115}
2116
2117void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2118 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002119 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002120}
2121
Alexandre Rames67555f72014-11-18 10:55:16 +00002122void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2123 LocationSummary* locations =
2124 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2125 InvokeRuntimeCallingConvention calling_convention;
2126 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2127}
2128
2129void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2130 codegen_->InvokeRuntime(instruction->IsEnter()
2131 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2132 instruction,
2133 instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002134 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002135}
2136
Alexandre Rames42d641b2014-10-27 14:00:51 +00002137void LocationsBuilderARM64::VisitMul(HMul* mul) {
2138 LocationSummary* locations =
2139 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2140 switch (mul->GetResultType()) {
2141 case Primitive::kPrimInt:
2142 case Primitive::kPrimLong:
2143 locations->SetInAt(0, Location::RequiresRegister());
2144 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002145 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002146 break;
2147
2148 case Primitive::kPrimFloat:
2149 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002150 locations->SetInAt(0, Location::RequiresFpuRegister());
2151 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002152 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002153 break;
2154
2155 default:
2156 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2157 }
2158}
2159
2160void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2161 switch (mul->GetResultType()) {
2162 case Primitive::kPrimInt:
2163 case Primitive::kPrimLong:
2164 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2165 break;
2166
2167 case Primitive::kPrimFloat:
2168 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002169 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002170 break;
2171
2172 default:
2173 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2174 }
2175}
2176
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002177void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2178 LocationSummary* locations =
2179 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2180 switch (neg->GetResultType()) {
2181 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002182 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002183 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002184 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002185 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002186
2187 case Primitive::kPrimFloat:
2188 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002189 locations->SetInAt(0, Location::RequiresFpuRegister());
2190 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002191 break;
2192
2193 default:
2194 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2195 }
2196}
2197
2198void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2199 switch (neg->GetResultType()) {
2200 case Primitive::kPrimInt:
2201 case Primitive::kPrimLong:
2202 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2203 break;
2204
2205 case Primitive::kPrimFloat:
2206 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002207 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002208 break;
2209
2210 default:
2211 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2212 }
2213}
2214
2215void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2216 LocationSummary* locations =
2217 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2218 InvokeRuntimeCallingConvention calling_convention;
2219 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002220 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002221 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002222 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2223 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2224 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002225}
2226
2227void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2228 LocationSummary* locations = instruction->GetLocations();
2229 InvokeRuntimeCallingConvention calling_convention;
2230 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2231 DCHECK(type_index.Is(w0));
2232 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002233 DCHECK(current_method.Is(w2));
Alexandre Rames67555f72014-11-18 10:55:16 +00002234 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002235 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002236 codegen_->InvokeRuntime(
2237 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002238 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2239 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002240}
2241
Alexandre Rames5319def2014-10-23 10:03:10 +01002242void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2243 LocationSummary* locations =
2244 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2245 InvokeRuntimeCallingConvention calling_convention;
2246 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2247 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2248 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002249 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002250}
2251
2252void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2253 LocationSummary* locations = instruction->GetLocations();
2254 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2255 DCHECK(type_index.Is(w0));
2256 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2257 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002258 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002259 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002260 codegen_->InvokeRuntime(
2261 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002262 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002263}
2264
2265void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2266 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002267 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002268 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002269}
2270
2271void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2272 switch (instruction->InputAt(0)->GetType()) {
2273 case Primitive::kPrimBoolean:
2274 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
2275 break;
2276
2277 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002278 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002279 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002280 break;
2281
2282 default:
2283 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2284 }
2285}
2286
2287void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2288 LocationSummary* locations =
2289 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2290 locations->SetInAt(0, Location::RequiresRegister());
2291 if (instruction->HasUses()) {
2292 locations->SetOut(Location::SameAsFirstInput());
2293 }
2294}
2295
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002296void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
2297 Location obj = instruction->GetLocations()->InAt(0);
2298
2299 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
2300 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2301}
2302
2303void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002304 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2305 codegen_->AddSlowPath(slow_path);
2306
2307 LocationSummary* locations = instruction->GetLocations();
2308 Location obj = locations->InAt(0);
2309 if (obj.IsRegister()) {
2310 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
2311 } else {
2312 DCHECK(obj.IsConstant()) << obj;
2313 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2314 __ B(slow_path->GetEntryLabel());
2315 }
2316}
2317
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002318void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2319 if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2320 GenerateImplicitNullCheck(instruction);
2321 } else {
2322 GenerateExplicitNullCheck(instruction);
2323 }
2324}
2325
Alexandre Rames67555f72014-11-18 10:55:16 +00002326void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2327 HandleBinaryOp(instruction);
2328}
2329
2330void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2331 HandleBinaryOp(instruction);
2332}
2333
Alexandre Rames3e69f162014-12-10 10:36:50 +00002334void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2335 LOG(FATAL) << "Unreachable";
2336}
2337
2338void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2339 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2340}
2341
Alexandre Rames5319def2014-10-23 10:03:10 +01002342void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2343 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2344 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2345 if (location.IsStackSlot()) {
2346 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2347 } else if (location.IsDoubleStackSlot()) {
2348 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2349 }
2350 locations->SetOut(location);
2351}
2352
2353void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2354 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002355 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002356}
2357
2358void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2359 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2360 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2361 locations->SetInAt(i, Location::Any());
2362 }
2363 locations->SetOut(Location::Any());
2364}
2365
2366void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002367 UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
                                                       : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register dividend = InputRegisterAt(rem, 0);
      Register divisor = InputRegisterAt(rem, 1);
      Register output = OutputRegister(rem);
      Register temp = temps.AcquireSameSizeAs(output);

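      // rem = dividend - (dividend / divisor) * divisor; sdiv truncates toward
      // zero and msub folds the multiply-and-subtract into one instruction.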
      __ Sdiv(temp, dividend, divisor);
      __ Msub(output, temp, divisor, dividend);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
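      // There is no A64 instruction for floating-point remainder, so call the
      // fmodf/fmod runtime entry points.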
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
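  // 32-bit and reference values take a single stack slot; long and double
  // values take a double stack slot.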
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

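  // Volatile loads prefer a single load-acquire; the fallback is a plain load
  // followed by a full barrier.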
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

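  // Volatile stores use a single store-release when available; the fallback
  // brackets a plain store with barriers.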
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

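  // Storing a reference dirties the card of the holder object, here the class.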
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
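    // char is the only unsigned integral type: conversions to char, and
    // widenings from char, zero-extend (ubfx); every other integral conversion
    // sign-extends the low bits of the source (sbfx).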
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
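    // Integer to floating-point: scvtf treats the source as signed.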
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
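    // Floating-point to integer: fcvtzs rounds toward zero and saturates,
    // which matches the Java narrowing-conversion rules.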
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art