/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
static constexpr bool kUseAcquireRelease = true;
static constexpr bool kExplicitStackOverflowCheck = false;
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {

bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

bool IsIntegralType(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      return true;
    default:
      return false;
  }
}

bool Is64BitType(Primitive::Type type) {
  return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
}

// Convenience helpers to ease conversion to and from VIXL operands.
static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
              "Unexpected values for register codes.");

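// ART and VIXL use different encodings for the stack pointer and the zero
// register (the ART codes are checked by the static_assert above), so register
// codes are translated whenever they cross the ART/VIXL boundary.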
int VIXLRegCodeFromART(int code) {
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

FPRegister DRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::DRegFromCode(location.reg());
}

FPRegister SRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::SRegFromCode(location.reg());
}

FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
  DCHECK(IsFPType(type));
  return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
}

FPRegister OutputFPRegister(HInstruction* instr) {
  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
  return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                        instr->InputAt(input_index)->GetType());
}

CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
  return IsFPType(type) ? CPURegister(FPRegisterFrom(location, type))
                        : CPURegister(RegisterFrom(location, type));
}

CPURegister OutputCPURegister(HInstruction* instr) {
  return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr))
                                    : static_cast<CPURegister>(OutputRegister(instr));
}

CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
  return IsFPType(instr->InputAt(index)->GetType())
      ? static_cast<CPURegister>(InputFPRegisterAt(instr, index))
      : static_cast<CPURegister>(InputRegisterAt(instr, index));
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, size_t offset = 0) {
  // A heap reference must be 32bit, so fit in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset);
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  return HeapOperand(base, offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Offset offset) {
  return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

Location LocationFrom(const FPRegister& fpreg) {
  return Location::FpuRegisterLocation(fpreg.code());
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

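// Shorthands used by the slow paths below: `__` expands to the VIXL macro
// assembler of the enclosing code generator (hence the `#ifdef __` guard at the
// top of this file), and QUICK_ENTRY_POINT(x) yields the Thread-relative offset
// of the corresponding quick runtime entrypoint.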
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  StackOverflowCheckSlowPathARM64() {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
    CheckEntrypointTypes<kQuickThrowStackOverflow, void, void>();
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

#undef __

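// Computes the location of the next argument under the managed-code calling
// convention: floating-point values are assigned to FP argument registers and
// core values to core argument registers while any remain, after which
// arguments overflow to the stack. Stack space is reserved for every argument
// regardless of where it is actually passed.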
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->SwapLocations(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::RestoreScratch(int reg) {
  __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void ParallelMoveResolverARM64::SpillScratch(int reg) {
  __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

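// Generates the method entry sequence. When a stack overflow check is needed
// it is either explicit (compare sp against Thread::StackEndOffset and branch
// to a slow path) or implicit (probe one word below the reserved stack
// region). The current method, passed in w0, is then stored at sp[0] by a
// pre-indexed store that also allocates the whole frame, and the preserved
// registers are spilled at the top of the frame as described below.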
void CodeGeneratorARM64::GenerateFrameEntry() {
  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireX();
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
      AddSlowPath(slow_path);

      __ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
      __ Cmp(sp, temp);
      __ B(lo, slow_path->GetEntryLabel());
    } else {
      __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
      __ Ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

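// Marks the GC card corresponding to `object` when a non-null reference
// `value` has been stored into it: the card table base is loaded from the
// current thread, the card index is `object >> kCardShift`, and the low byte
// of the base address is stored at that index as the dirty marker.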
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   tr
  //   lr
  //   sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant() || constant->IsLongConstant()) {
    __ Mov(Register(destination),
           constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                     : constant->AsLongConstant()->GetValue());
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
           (destination.IsRegister() && !IsFPType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }

  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
             (source.IsFpuRegister() == IsFPType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

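// Swaps two arbitrary (non-constant) locations for the parallel move resolver.
// Four cases are handled: core register <-> core register, FP register <-> FP
// register, register <-> stack slot, and stack slot <-> stack slot; anything
// else is not expected here.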
void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    Register r1 = XRegisterFrom(loc1);
    Register r2 = XRegisterFrom(loc2);
    Register tmp = temps.AcquireSameSizeAs(r1);
    __ Mov(tmp, r2);
    __ Mov(r2, r1);
    __ Mov(r1, tmp);
  } else if (is_fp_reg2 && is_fp_reg1) {
    FPRegister r1 = DRegisterFrom(loc1);
    FPRegister r2 = DRegisterFrom(loc2);
    FPRegister tmp = temps.AcquireSameSizeAs(r1);
    __ Fmov(tmp, r2);
    __ Fmov(r2, r1);
    __ Fmov(r1, tmp);
  } else if (is_slot1 != is_slot2) {
    MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
    Location reg_loc = is_slot1 ? loc2 : loc1;
    CPURegister reg, tmp;
    if (reg_loc.IsFpuRegister()) {
      reg = DRegisterFrom(reg_loc);
      tmp = temps.AcquireD();
    } else {
      reg = XRegisterFrom(reg_loc);
      tmp = temps.AcquireX();
    }
    __ Ldr(tmp, mem);
    __ Str(reg, mem);
    if (reg_loc.IsFpuRegister()) {
      __ Fmov(FPRegister(reg), FPRegister(tmp));
    } else {
      __ Mov(Register(reg), Register(tmp));
    }
  } else if (is_slot1 && is_slot2) {
    MemOperand mem1 = StackOperandFrom(loc1);
    MemOperand mem2 = StackOperandFrom(loc2);
    Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
    Register tmp2 = temps.AcquireSameSizeAs(tmp1);
    __ Ldr(tmp1, mem1);
    __ Ldr(tmp2, mem2);
    __ Str(tmp1, mem2);
    __ Str(tmp2, mem1);
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

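// Acquire-ordered counterpart of Load(), used when kUseAcquireRelease is true.
// The addressing mode is materialized into a temporary base register because
// load-acquire only supports base-register addressing. Byte and half-word
// values are sign-extended manually where needed, since there is no signed
// load-acquire instruction, and FP values are loaded through a core register.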
void CodeGeneratorARM64::LoadAcquire(Primitive::Type type,
                                     CPURegister dst,
                                     const MemOperand& src) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!src.IsRegisterOffset());
  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), src.offset());
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldar(Register(dst), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

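// Release-ordered counterpart of Store(): the value is published with
// STLRB/STLRH/STLR on a materialized base register, and FP values are moved
// to a core register first since there is no FP store-release instruction.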
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsRegisterOffset());
  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  __ Add(temp_base, dst.base(), dst.offset());
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

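// Calls a quick runtime entrypoint: the target is loaded from the current
// thread at `entry_point_offset` and called through `lr`. A PC is recorded
// for the call unless `instruction` is null (as for the stack overflow check,
// which has no associated HInstruction).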
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc);
    DCHECK(instruction->IsSuspendCheck()
           || instruction->IsBoundsCheck()
           || instruction->IsNullCheck()
           || instruction->IsDivZeroCheck()
           || !IsLeafMethod());
  }
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (kUseAcquireRelease) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

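// Emits a suspend check: the thread flags half-word is loaded from the current
// thread and, if any flag is set, control goes to a SuspendCheckSlowPathARM64.
// When `successor` is null the fast path continues after the check; otherwise
// the check branches to `successor` when no flags are set and the slow path
// returns there.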
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                     \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                                \
  }                                                                                    \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

1318void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1319 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1320
1321 Primitive::Type type = instr->GetType();
1322 switch (type) {
1323 case Primitive::kPrimInt:
1324 case Primitive::kPrimLong: {
1325 Register dst = OutputRegister(instr);
1326 Register lhs = InputRegisterAt(instr, 0);
1327 Operand rhs = InputOperandAt(instr, 1);
1328 if (rhs.IsImmediate()) {
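        // The masks below keep only the bits of the shift distance that are
        // significant for Java shifts: the low 5 bits for int, 6 bits for long.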
1329 uint32_t shift_value = (type == Primitive::kPrimInt)
1330 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1331 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1332 if (instr->IsShl()) {
1333 __ Lsl(dst, lhs, shift_value);
1334 } else if (instr->IsShr()) {
1335 __ Asr(dst, lhs, shift_value);
1336 } else {
1337 __ Lsr(dst, lhs, shift_value);
1338 }
1339 } else {
1340 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1341
1342 if (instr->IsShl()) {
1343 __ Lsl(dst, lhs, rhs_reg);
1344 } else if (instr->IsShr()) {
1345 __ Asr(dst, lhs, rhs_reg);
1346 } else {
1347 __ Lsr(dst, lhs, rhs_reg);
1348 }
1349 }
1350 break;
1351 }
1352 default:
1353 LOG(FATAL) << "Unexpected shift operation type " << type;
1354 }
1355}
1356
Alexandre Rames5319def2014-10-23 10:03:10 +01001357void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001358 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001359}
1360
1361void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001362 HandleBinaryOp(instruction);
1363}
1364
1365void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1366 HandleBinaryOp(instruction);
1367}
1368
1369void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1370 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001371}
1372
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001373void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1374 LocationSummary* locations =
1375 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1376 locations->SetInAt(0, Location::RequiresRegister());
1377 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1378 locations->SetOut(Location::RequiresRegister());
1379}
1380
1381void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1382 LocationSummary* locations = instruction->GetLocations();
1383 Primitive::Type type = instruction->GetType();
1384 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001385 Location index = locations->InAt(1);
1386 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001387 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001388 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001389
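  // A constant index is folded into the load offset; otherwise the scaled index is
  // added to the object pointer in a temporary register and the element is loaded from there.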
1390 if (index.IsConstant()) {
1391 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001392 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001393 } else {
1394 Register temp = temps.AcquireSameSizeAs(obj);
1395 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1396 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001397 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001398 }
1399
Alexandre Rames67555f72014-11-18 10:55:16 +00001400 codegen_->Load(type, OutputCPURegister(instruction), source);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001401}
1402
Alexandre Rames5319def2014-10-23 10:03:10 +01001403void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1404 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1405 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001406 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001407}
1408
1409void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1410 __ Ldr(OutputRegister(instruction),
1411 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
1412}
1413
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001414void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1415 Primitive::Type value_type = instruction->GetComponentType();
1416 bool is_object = value_type == Primitive::kPrimNot;
1417 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1418 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1419 if (is_object) {
1420 InvokeRuntimeCallingConvention calling_convention;
1421 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1422 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1423 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1424 } else {
1425 locations->SetInAt(0, Location::RequiresRegister());
1426 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1427 locations->SetInAt(2, Location::RequiresRegister());
1428 }
1429}
1430
1431void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1432 Primitive::Type value_type = instruction->GetComponentType();
1433 if (value_type == Primitive::kPrimNot) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001434 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001435 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001436 } else {
1437 LocationSummary* locations = instruction->GetLocations();
1438 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001439 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001440 Location index = locations->InAt(1);
1441 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001442 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001443 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001444
1445 if (index.IsConstant()) {
1446 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001447 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001448 } else {
1449 Register temp = temps.AcquireSameSizeAs(obj);
1450 Register index_reg = InputRegisterAt(instruction, 1);
1451 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001452 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001453 }
1454
1455 codegen_->Store(value_type, value, destination);
1456 }
1457}
1458
Alexandre Rames67555f72014-11-18 10:55:16 +00001459void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1460 LocationSummary* locations =
1461 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1462 locations->SetInAt(0, Location::RequiresRegister());
1463 locations->SetInAt(1, Location::RequiresRegister());
1464 if (instruction->HasUses()) {
1465 locations->SetOut(Location::SameAsFirstInput());
1466 }
1467}
1468
1469void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001470 LocationSummary* locations = instruction->GetLocations();
1471 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1472 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001473 codegen_->AddSlowPath(slow_path);
1474
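  // Branch to the slow path when index >= length. The unsigned (hs) comparison also
  // catches negative indices, since they wrap around to large unsigned values.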
1475 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1476 __ B(slow_path->GetEntryLabel(), hs);
1477}
1478
1479void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1480 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1481 instruction, LocationSummary::kCallOnSlowPath);
1482 locations->SetInAt(0, Location::RequiresRegister());
1483 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001484 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001485}
1486
1487void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001488 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001489 Register obj = InputRegisterAt(instruction, 0);
1490 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001491 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001492
Alexandre Rames3e69f162014-12-10 10:36:50 +00001493 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1494 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001495 codegen_->AddSlowPath(slow_path);
1496
1497 // TODO: avoid this check if we know obj is not null.
1498 __ Cbz(obj, slow_path->GetExitLabel());
1499 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001500 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1501 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001502 __ B(ne, slow_path->GetEntryLabel());
1503 __ Bind(slow_path->GetExitLabel());
1504}
1505
1506void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1507 LocationSummary* locations =
1508 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1509 locations->SetInAt(0, Location::RequiresRegister());
1510 if (check->HasUses()) {
1511 locations->SetOut(Location::SameAsFirstInput());
1512 }
1513}
1514
1515void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1516 // We assume the class is not null.
1517 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1518 check->GetLoadClass(), check, check->GetDexPc(), true);
1519 codegen_->AddSlowPath(slow_path);
1520 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1521}
1522
Serban Constantinescu02164b32014-11-13 14:05:07 +00001523void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001524 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001525 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1526 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001527 switch (in_type) {
1528 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001529 locations->SetInAt(0, Location::RequiresRegister());
1530 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1531 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1532 break;
1533 }
1534 case Primitive::kPrimFloat:
1535 case Primitive::kPrimDouble: {
1536 locations->SetInAt(0, Location::RequiresFpuRegister());
1537 locations->SetInAt(1, Location::RequiresFpuRegister());
1538 locations->SetOut(Location::RequiresRegister());
1539 break;
1540 }
1541 default:
1542 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1543 }
1544}
1545
1546void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1547 Primitive::Type in_type = compare->InputAt(0)->GetType();
1548
1549 // 0 if: left == right
1550 // 1 if: left > right
1551 // -1 if: left < right
1552 switch (in_type) {
1553 case Primitive::kPrimLong: {
1554 Register result = OutputRegister(compare);
1555 Register left = InputRegisterAt(compare, 0);
1556 Operand right = InputOperandAt(compare, 1);
1557
1558 __ Cmp(left, right);
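      // Cset sets result to 1 when the operands differ (0 when equal); Cneg then
      // negates it when left < right, producing -1, 0 or +1.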
1559 __ Cset(result, ne);
1560 __ Cneg(result, result, lt);
1561 break;
1562 }
1563 case Primitive::kPrimFloat:
1564 case Primitive::kPrimDouble: {
1565 Register result = OutputRegister(compare);
1566 FPRegister left = InputFPRegisterAt(compare, 0);
1567 FPRegister right = InputFPRegisterAt(compare, 1);
1568
1569 __ Fcmp(left, right);
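      // When either input is NaN, Fcmp yields an unordered result: 'ne' holds while
      // 'mi' and 'gt' do not, so the gt-bias path produces +1 and the lt-bias path -1 for NaN.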
1570 if (compare->IsGtBias()) {
1571 __ Cset(result, ne);
1572 } else {
1573 __ Csetm(result, ne);
1574 }
1575 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001576 break;
1577 }
1578 default:
1579 LOG(FATAL) << "Unimplemented compare type " << in_type;
1580 }
1581}
1582
1583void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1584 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1585 locations->SetInAt(0, Location::RequiresRegister());
1586 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1587 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001588 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001589 }
1590}
1591
1592void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1593 if (!instruction->NeedsMaterialization()) {
1594 return;
1595 }
1596
1597 LocationSummary* locations = instruction->GetLocations();
1598 Register lhs = InputRegisterAt(instruction, 0);
1599 Operand rhs = InputOperandAt(instruction, 1);
1600 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1601 Condition cond = ARM64Condition(instruction->GetCondition());
1602
1603 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001604 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001605}
1606
1607#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1608 M(Equal) \
1609 M(NotEqual) \
1610 M(LessThan) \
1611 M(LessThanOrEqual) \
1612 M(GreaterThan) \
1613 M(GreaterThanOrEqual)
1614#define DEFINE_CONDITION_VISITORS(Name) \
1615void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1616void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1617FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001618#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001619#undef FOR_EACH_CONDITION_INSTRUCTION
1620
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001621void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1622 LocationSummary* locations =
1623 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1624 switch (div->GetResultType()) {
1625 case Primitive::kPrimInt:
1626 case Primitive::kPrimLong:
1627 locations->SetInAt(0, Location::RequiresRegister());
1628 locations->SetInAt(1, Location::RequiresRegister());
1629 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1630 break;
1631
1632 case Primitive::kPrimFloat:
1633 case Primitive::kPrimDouble:
1634 locations->SetInAt(0, Location::RequiresFpuRegister());
1635 locations->SetInAt(1, Location::RequiresFpuRegister());
1636 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1637 break;
1638
1639 default:
1640 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1641 }
1642}
1643
1644void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1645 Primitive::Type type = div->GetResultType();
1646 switch (type) {
1647 case Primitive::kPrimInt:
1648 case Primitive::kPrimLong:
1649 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1650 break;
1651
1652 case Primitive::kPrimFloat:
1653 case Primitive::kPrimDouble:
1654 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1655 break;
1656
1657 default:
1658 LOG(FATAL) << "Unexpected div type " << type;
1659 }
1660}
1661
Alexandre Rames67555f72014-11-18 10:55:16 +00001662void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1663 LocationSummary* locations =
1664 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1665 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1666 if (instruction->HasUses()) {
1667 locations->SetOut(Location::SameAsFirstInput());
1668 }
1669}
1670
1671void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1672 SlowPathCodeARM64* slow_path =
1673 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1674 codegen_->AddSlowPath(slow_path);
1675 Location value = instruction->GetLocations()->InAt(0);
1676
Alexandre Rames3e69f162014-12-10 10:36:50 +00001677 Primitive::Type type = instruction->GetType();
1678
1679 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1680 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1681 return;
1682 }
1683
Alexandre Rames67555f72014-11-18 10:55:16 +00001684 if (value.IsConstant()) {
1685 int64_t divisor = Int64ConstantFrom(value);
1686 if (divisor == 0) {
1687 __ B(slow_path->GetEntryLabel());
1688 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001689 // A division by a non-zero constant is valid. We don't need to perform
1690 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001691 }
1692 } else {
1693 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1694 }
1695}
1696
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001697void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1698 LocationSummary* locations =
1699 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1700 locations->SetOut(Location::ConstantLocation(constant));
1701}
1702
1703void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1704 UNUSED(constant);
1705 // Will be generated at use site.
1706}
1707
Alexandre Rames5319def2014-10-23 10:03:10 +01001708void LocationsBuilderARM64::VisitExit(HExit* exit) {
1709 exit->SetLocations(nullptr);
1710}
1711
1712void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001713 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001714 if (kIsDebugBuild) {
1715 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001716 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001717 }
1718}
1719
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001720void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1721 LocationSummary* locations =
1722 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1723 locations->SetOut(Location::ConstantLocation(constant));
1724}
1725
1726void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1727 UNUSED(constant);
1728 // Will be generated at use site.
1729}
1730
Alexandre Rames5319def2014-10-23 10:03:10 +01001731void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1732 got->SetLocations(nullptr);
1733}
1734
1735void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1736 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001737 DCHECK(!successor->IsExitBlock());
1738 HBasicBlock* block = got->GetBlock();
1739 HInstruction* previous = got->GetPrevious();
1740 HLoopInformation* info = block->GetLoopInformation();
1741
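  // If this branch is a loop back edge carrying a suspend check, emit the suspend check
  // here (after clearing loop-phi spill slots from the stack map) before jumping back.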
1742 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1743 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1744 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1745 return;
1746 }
1747 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1748 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1749 }
1750 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001751 __ B(codegen_->GetLabelOf(successor));
1752 }
1753}
1754
1755void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1756 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1757 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001758 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001759 locations->SetInAt(0, Location::RequiresRegister());
1760 }
1761}
1762
1763void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1764 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001765 HCondition* condition = cond->AsCondition();
1766 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1767 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1768
Serban Constantinescu02164b32014-11-13 14:05:07 +00001769 if (cond->IsIntConstant()) {
1770 int32_t cond_value = cond->AsIntConstant()->GetValue();
1771 if (cond_value == 1) {
1772 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1773 __ B(true_target);
1774 }
1775 return;
1776 } else {
1777 DCHECK_EQ(cond_value, 0);
1778 }
1779 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001780 // The condition instruction has been materialized; compare the output to 0.
1781 Location cond_val = if_instr->GetLocations()->InAt(0);
1782 DCHECK(cond_val.IsRegister());
1783 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001784 } else {
1785 // The condition instruction has not been materialized; use its inputs as
1786 // the comparison and its condition as the branch condition.
1787 Register lhs = InputRegisterAt(condition, 0);
1788 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001789 Condition arm64_cond = ARM64Condition(condition->GetCondition());
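    // An equality comparison against zero can be emitted as a single cbz/cbnz
    // instead of a cmp followed by a conditional branch.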
1790 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1791 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001792 __ Cbz(lhs, true_target);
1793 } else {
1794 __ Cbnz(lhs, true_target);
1795 }
1796 } else {
1797 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001798 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001799 }
1800 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001801 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1802 __ B(false_target);
1803 }
1804}
1805
1806void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001807 LocationSummary* locations =
1808 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001809 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001810 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001811}
1812
1813void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001814 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001815
1816 if (instruction->IsVolatile()) {
1817 if (kUseAcquireRelease) {
1818 codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
1819 } else {
1820 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1821 // For IRIW sequential consistency kLoadAny is not sufficient.
1822 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1823 }
1824 } else {
1825 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1826 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001827}
1828
1829void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001830 LocationSummary* locations =
1831 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001832 locations->SetInAt(0, Location::RequiresRegister());
1833 locations->SetInAt(1, Location::RequiresRegister());
1834}
1835
1836void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001837 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001838 CPURegister value = InputCPURegisterAt(instruction, 1);
1839 Offset offset = instruction->GetFieldOffset();
1840 Primitive::Type field_type = instruction->GetFieldType();
1841
1842 if (instruction->IsVolatile()) {
1843 if (kUseAcquireRelease) {
1844 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1845 } else {
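      // Without store-release, bracket the store with explicit barriers: kAnyStore keeps
      // prior accesses from reordering past the store, and kAnyAny orders it before later accesses.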
1846 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1847 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1848 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1849 }
1850 } else {
1851 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1852 }
1853
1854 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001855 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001856 }
1857}
1858
Alexandre Rames67555f72014-11-18 10:55:16 +00001859void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1860 LocationSummary::CallKind call_kind =
1861 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1862 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1863 locations->SetInAt(0, Location::RequiresRegister());
1864 locations->SetInAt(1, Location::RequiresRegister());
1865 locations->SetOut(Location::RequiresRegister(), true); // The output does overlap inputs.
1866}
1867
1868void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1869 LocationSummary* locations = instruction->GetLocations();
1870 Register obj = InputRegisterAt(instruction, 0);
1871 Register cls = InputRegisterAt(instruction, 1);
1872 Register out = OutputRegister(instruction);
1873
1874 vixl::Label done;
1875
1876 // Return 0 if `obj` is null.
1877 // TODO: Avoid this check if we know `obj` is not null.
1878 __ Mov(out, 0);
1879 __ Cbz(obj, &done);
1880
1881 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001882 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001883 __ Cmp(out, cls);
1884 if (instruction->IsClassFinal()) {
1885 // Classes must be equal for the instanceof to succeed.
1886 __ Cset(out, eq);
1887 } else {
1888 // If the classes are not equal, we go into a slow path.
1889 DCHECK(locations->OnlyCallsOnSlowPath());
1890 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001891 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1892 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001893 codegen_->AddSlowPath(slow_path);
1894 __ B(ne, slow_path->GetEntryLabel());
1895 __ Mov(out, 1);
1896 __ Bind(slow_path->GetExitLabel());
1897 }
1898
1899 __ Bind(&done);
1900}
1901
Alexandre Rames5319def2014-10-23 10:03:10 +01001902void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1903 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1904 locations->SetOut(Location::ConstantLocation(constant));
1905}
1906
1907void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1908 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001909 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001910}
1911
Alexandre Rames5319def2014-10-23 10:03:10 +01001912void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1913 LocationSummary* locations =
1914 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1915 locations->AddTemp(LocationFrom(x0));
1916
1917 InvokeDexCallingConventionVisitor calling_convention_visitor;
1918 for (size_t i = 0; i < invoke->InputCount(); i++) {
1919 HInstruction* input = invoke->InputAt(i);
1920 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1921 }
1922
1923 Primitive::Type return_type = invoke->GetType();
1924 if (return_type != Primitive::kPrimVoid) {
1925 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1926 }
1927}
1928
Alexandre Rames67555f72014-11-18 10:55:16 +00001929void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1930 HandleInvoke(invoke);
1931}
1932
1933void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1934 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1935 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
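  // Offset of this method's slot in the class's embedded interface method table (IMT),
  // indexed by the IMT index modulo the table size.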
1936 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1937 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1938 Location receiver = invoke->GetLocations()->InAt(0);
1939 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001940 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001941
1942 // The register ip1 is required to be used for the hidden argument in
1943 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1944 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1945 scratch_scope.Exclude(ip1);
1946 __ Mov(ip1, invoke->GetDexMethodIndex());
1947
1948 // temp = object->GetClass();
1949 if (receiver.IsStackSlot()) {
1950 __ Ldr(temp, StackOperandFrom(receiver));
1951 __ Ldr(temp, HeapOperand(temp, class_offset));
1952 } else {
1953 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1954 }
1955 // temp = temp->GetImtEntryAt(method_offset);
1956 __ Ldr(temp, HeapOperand(temp, method_offset));
1957 // lr = temp->GetEntryPoint();
1958 __ Ldr(lr, HeapOperand(temp, entry_point));
1959 // lr();
1960 __ Blr(lr);
1961 DCHECK(!codegen_->IsLeafMethod());
1962 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1963}
1964
1965void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1966 HandleInvoke(invoke);
1967}
1968
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001969void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001970 HandleInvoke(invoke);
1971}
1972
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001973void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001974 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1975 // Make sure that ArtMethod* is passed in W0 as per the calling convention.
1976 DCHECK(temp.Is(w0));
1977 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
1978 invoke->GetIndexInDexCache() * kHeapRefSize;
1979
1980 // TODO: Implement all kinds of calls:
1981 // 1) boot -> boot
1982 // 2) app -> boot
1983 // 3) app -> app
1984 //
1985 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1986
1987 // temp = method;
Alexandre Rames67555f72014-11-18 10:55:16 +00001988 codegen_->LoadCurrentMethod(temp);
Nicolas Geoffray4e44c822014-12-17 12:25:12 +00001989 // temp = temp->dex_cache_resolved_methods_;
1990 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1991 // temp = temp[index_in_cache];
1992 __ Ldr(temp, HeapOperand(temp, index_in_cache));
Alexandre Rames5319def2014-10-23 10:03:10 +01001993 // lr = temp->entry_point_from_quick_compiled_code_;
Serban Constantinescu02164b32014-11-13 14:05:07 +00001994 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1995 kArm64WordSize)));
Alexandre Rames5319def2014-10-23 10:03:10 +01001996 // lr();
1997 __ Blr(lr);
1998
1999 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2000 DCHECK(!codegen_->IsLeafMethod());
2001}
2002
2003void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
2004 LocationSummary* locations = invoke->GetLocations();
2005 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002006 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002007 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2008 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2009 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002010 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002011
2012 // temp = object->GetClass();
2013 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002014 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2015 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002016 } else {
2017 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002018 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002019 }
2020 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002021 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002022 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002023 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002024 // lr();
2025 __ Blr(lr);
2026 DCHECK(!codegen_->IsLeafMethod());
2027 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2028}
2029
Alexandre Rames67555f72014-11-18 10:55:16 +00002030void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2031 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2032 : LocationSummary::kNoCall;
2033 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2034 locations->SetOut(Location::RequiresRegister());
2035}
2036
2037void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2038 Register out = OutputRegister(cls);
2039 if (cls->IsReferrersClass()) {
2040 DCHECK(!cls->CanCallRuntime());
2041 DCHECK(!cls->MustGenerateClinitCheck());
2042 codegen_->LoadCurrentMethod(out);
2043 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2044 } else {
2045 DCHECK(cls->CanCallRuntime());
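    // Load the type from the current method's dex cache of resolved types; if the entry
    // is still null, the slow path resolves the class (and initializes it when required).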
2046 codegen_->LoadCurrentMethod(out);
2047 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002048 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002049
2050 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2051 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2052 codegen_->AddSlowPath(slow_path);
2053 __ Cbz(out, slow_path->GetEntryLabel());
2054 if (cls->MustGenerateClinitCheck()) {
2055 GenerateClassInitializationCheck(slow_path, out);
2056 } else {
2057 __ Bind(slow_path->GetExitLabel());
2058 }
2059 }
2060}
2061
2062void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2063 LocationSummary* locations =
2064 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2065 locations->SetOut(Location::RequiresRegister());
2066}
2067
2068void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2069 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2070 __ Ldr(OutputRegister(instruction), exception);
2071 __ Str(wzr, exception);
2072}
2073
Alexandre Rames5319def2014-10-23 10:03:10 +01002074void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2075 load->SetLocations(nullptr);
2076}
2077
2078void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2079 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002080 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002081}
2082
Alexandre Rames67555f72014-11-18 10:55:16 +00002083void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2084 LocationSummary* locations =
2085 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2086 locations->SetOut(Location::RequiresRegister());
2087}
2088
2089void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2090 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2091 codegen_->AddSlowPath(slow_path);
2092
2093 Register out = OutputRegister(load);
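  // Walk current method -> declaring class -> dex cache strings -> entry for this string
  // index; if the entry is still null, the slow path resolves the string.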
2094 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002095 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2096 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002097 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002098 __ Cbz(out, slow_path->GetEntryLabel());
2099 __ Bind(slow_path->GetExitLabel());
2100}
2101
Alexandre Rames5319def2014-10-23 10:03:10 +01002102void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2103 local->SetLocations(nullptr);
2104}
2105
2106void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2107 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2108}
2109
2110void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2111 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2112 locations->SetOut(Location::ConstantLocation(constant));
2113}
2114
2115void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2116 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002117 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002118}
2119
Alexandre Rames67555f72014-11-18 10:55:16 +00002120void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2121 LocationSummary* locations =
2122 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2123 InvokeRuntimeCallingConvention calling_convention;
2124 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2125}
2126
2127void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2128 codegen_->InvokeRuntime(instruction->IsEnter()
2129 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2130 instruction,
2131 instruction->GetDexPc());
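  // Note: only the kQuickLockObject signature is checked here; the unlock entrypoint is
  // assumed to share the same (void, mirror::Object*) signature.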
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002132 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002133}
2134
Alexandre Rames42d641b2014-10-27 14:00:51 +00002135void LocationsBuilderARM64::VisitMul(HMul* mul) {
2136 LocationSummary* locations =
2137 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2138 switch (mul->GetResultType()) {
2139 case Primitive::kPrimInt:
2140 case Primitive::kPrimLong:
2141 locations->SetInAt(0, Location::RequiresRegister());
2142 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002143 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002144 break;
2145
2146 case Primitive::kPrimFloat:
2147 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002148 locations->SetInAt(0, Location::RequiresFpuRegister());
2149 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002150 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002151 break;
2152
2153 default:
2154 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2155 }
2156}
2157
2158void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2159 switch (mul->GetResultType()) {
2160 case Primitive::kPrimInt:
2161 case Primitive::kPrimLong:
2162 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2163 break;
2164
2165 case Primitive::kPrimFloat:
2166 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002167 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002168 break;
2169
2170 default:
2171 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2172 }
2173}
2174
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002175void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2176 LocationSummary* locations =
2177 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2178 switch (neg->GetResultType()) {
2179 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002180 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002181 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002182 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002183 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002184
2185 case Primitive::kPrimFloat:
2186 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002187 locations->SetInAt(0, Location::RequiresFpuRegister());
2188 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002189 break;
2190
2191 default:
2192 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2193 }
2194}
2195
2196void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2197 switch (neg->GetResultType()) {
2198 case Primitive::kPrimInt:
2199 case Primitive::kPrimLong:
2200 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2201 break;
2202
2203 case Primitive::kPrimFloat:
2204 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002205 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002206 break;
2207
2208 default:
2209 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2210 }
2211}
2212
2213void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2214 LocationSummary* locations =
2215 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2216 InvokeRuntimeCallingConvention calling_convention;
2217 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002218 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002219 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002220 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2221 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2222 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002223}
2224
2225void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2226 LocationSummary* locations = instruction->GetLocations();
2227 InvokeRuntimeCallingConvention calling_convention;
2228 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2229 DCHECK(type_index.Is(w0));
2230 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002231 DCHECK(current_method.Is(w2));
Alexandre Rames67555f72014-11-18 10:55:16 +00002232 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002233 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002234 codegen_->InvokeRuntime(
2235 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002236 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2237 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002238}
2239
Alexandre Rames5319def2014-10-23 10:03:10 +01002240void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2241 LocationSummary* locations =
2242 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2243 InvokeRuntimeCallingConvention calling_convention;
2244 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2245 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2246 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002247 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002248}
2249
2250void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2251 LocationSummary* locations = instruction->GetLocations();
2252 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2253 DCHECK(type_index.Is(w0));
2254 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2255 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002256 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002257 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002258 codegen_->InvokeRuntime(
2259 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002260 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002261}
2262
2263void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2264 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002265 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002266 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002267}
2268
2269void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2270 switch (instruction->InputAt(0)->GetType()) {
2271 case Primitive::kPrimBoolean:
2272 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
2273 break;
2274
2275 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002276 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002277 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002278 break;
2279
2280 default:
2281 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2282 }
2283}
2284
2285void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2286 LocationSummary* locations =
2287 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2288 locations->SetInAt(0, Location::RequiresRegister());
2289 if (instruction->HasUses()) {
2290 locations->SetOut(Location::SameAsFirstInput());
2291 }
2292}
2293
2294void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2295 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2296 codegen_->AddSlowPath(slow_path);
2297
2298 LocationSummary* locations = instruction->GetLocations();
2299 Location obj = locations->InAt(0);
2300 if (obj.IsRegister()) {
2301 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
2302 } else {
2303 DCHECK(obj.IsConstant()) << obj;
2304 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2305 __ B(slow_path->GetEntryLabel());
2306 }
2307}
2308
Alexandre Rames67555f72014-11-18 10:55:16 +00002309void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2310 HandleBinaryOp(instruction);
2311}
2312
2313void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2314 HandleBinaryOp(instruction);
2315}
2316
Alexandre Rames3e69f162014-12-10 10:36:50 +00002317void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2318 LOG(FATAL) << "Unreachable";
2319}
2320
2321void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2322 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2323}
2324
Alexandre Rames5319def2014-10-23 10:03:10 +01002325void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2326 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2327 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2328 if (location.IsStackSlot()) {
2329 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2330 } else if (location.IsDoubleStackSlot()) {
2331 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2332 }
2333 locations->SetOut(location);
2334}
2335
2336void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2337 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002338 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002339}
2340
2341void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2342 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2343 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2344 locations->SetInAt(i, Location::Any());
2345 }
2346 locations->SetOut(Location::Any());
2347}
2348
2349void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002350 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002351 LOG(FATAL) << "Unreachable";
2352}
2353
Serban Constantinescu02164b32014-11-13 14:05:07 +00002354void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002355 Primitive::Type type = rem->GetResultType();
2356 LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
2357 : LocationSummary::kNoCall;
2358 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2359
2360 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002361 case Primitive::kPrimInt:
2362 case Primitive::kPrimLong:
2363 locations->SetInAt(0, Location::RequiresRegister());
2364 locations->SetInAt(1, Location::RequiresRegister());
2365 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2366 break;
2367
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002368 case Primitive::kPrimFloat:
2369 case Primitive::kPrimDouble: {
2370 InvokeRuntimeCallingConvention calling_convention;
2371 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2372 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2373 locations->SetOut(calling_convention.GetReturnLocation(type));
2374
2375 break;
2376 }
2377
Serban Constantinescu02164b32014-11-13 14:05:07 +00002378 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002379 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00002380 }
2381}
2382
2383void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2384 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002385
Serban Constantinescu02164b32014-11-13 14:05:07 +00002386 switch (type) {
2387 case Primitive::kPrimInt:
2388 case Primitive::kPrimLong: {
2389 UseScratchRegisterScope temps(GetVIXLAssembler());
2390 Register dividend = InputRegisterAt(rem, 0);
2391 Register divisor = InputRegisterAt(rem, 1);
2392 Register output = OutputRegister(rem);
2393 Register temp = temps.AcquireSameSizeAs(output);
2394
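      // Integer remainder is computed as dividend - (dividend / divisor) * divisor,
      // using Sdiv followed by Msub.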
2395 __ Sdiv(temp, dividend, divisor);
2396 __ Msub(output, temp, divisor, dividend);
2397 break;
2398 }
2399
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

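  // Reference stores must also dirty the GC card for the holder object so the
  // garbage collector sees the newly written reference.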
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
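    // Integral conversions are implemented as bitfield extracts: conversions
    // to char and widenings from char (the only unsigned type) zero-extend
    // with ubfx, every other integral conversion sign-extends with sbfx.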
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
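    // fcvtzs rounds toward zero and saturates on overflow, which matches
    // Java's float/double to int/long narrowing semantics (NaN becomes 0).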
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art