/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
static constexpr bool kUseAcquireRelease = true;
static constexpr bool kExplicitStackOverflowCheck = false;
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {

bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

bool IsIntegralType(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      return true;
    default:
      return false;
  }
}

bool Is64BitType(Primitive::Type type) {
  return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
}

// Convenience helpers to ease conversion to and from VIXL operands.
static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
              "Unexpected values for register codes.");

int VIXLRegCodeFromART(int code) {
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

FPRegister DRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::DRegFromCode(location.reg());
}

FPRegister SRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::SRegFromCode(location.reg());
}

FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
  DCHECK(IsFPType(type));
  return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
}

FPRegister OutputFPRegister(HInstruction* instr) {
  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
  return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                        instr->InputAt(input_index)->GetType());
}

CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
  return IsFPType(type) ? CPURegister(FPRegisterFrom(location, type))
                        : CPURegister(RegisterFrom(location, type));
}

CPURegister OutputCPURegister(HInstruction* instr) {
  return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr))
                                    : static_cast<CPURegister>(OutputRegister(instr));
}

CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
  return IsFPType(instr->InputAt(index)->GetType())
      ? static_cast<CPURegister>(InputFPRegisterAt(instr, index))
      : static_cast<CPURegister>(InputRegisterAt(instr, index));
}

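// Returns the value held by an int or long constant location as an int64_t.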
int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, size_t offset = 0) {
  // A heap reference must be 32bit, so fit in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset);
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  return HeapOperand(base, offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Offset offset) {
  return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

Location LocationFrom(const FPRegister& fpreg) {
  return Location::FpuRegisterLocation(fpreg.code());
}

}  // namespace

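// Maps an HIR IfCondition to the equivalent ARM64 condition code.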
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

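// Base class for ARM64 slow paths: out-of-line code entered through the entry label
// and, when the slow path falls through, left through the exit label.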
class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}


  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0).W());
    __ Mov(calling_convention.GetRegisterAt(1).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  StackOverflowCheckSlowPathARM64() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->SwapLocations(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::RestoreScratch(int reg) {
  __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void ParallelMoveResolverARM64::SpillScratch(int reg) {
  __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireX();
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
      AddSlowPath(slow_path);

      __ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
      __ Cmp(sp, temp);
      __ B(lo, slow_path->GetEntryLabel());
    } else {
      __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
      __ Ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //      sp[frame_size - 8]        : lr.
  //      ...                       : other preserved registers.
  //      sp[frame_size - regs_size]: first preserved register.
  //      ...                       : reserved frame space.
  //      sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

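// Marks the card table entry corresponding to `object` when a reference `value` is
// stored into it. The marking is skipped when the stored value is null.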
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   tr
  //   lr
  //   sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant() || constant->IsLongConstant()) {
    __ Mov(Register(destination),
           constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                     : constant->AsLongConstant()->GetValue());
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
818 bool unspecified_type = (type == Primitive::kPrimVoid);
819
820 if (destination.IsRegister() || destination.IsFpuRegister()) {
821 if (unspecified_type) {
822 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
823 if (source.IsStackSlot() ||
824 (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
           (destination.IsRegister() && !IsFPType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }

  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
             (source.IsFpuRegister() == IsFPType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

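// Exchanges the contents of two locations (register/register, register/stack slot or
// stack slot/stack slot), using scratch registers as needed.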
void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    Register r1 = XRegisterFrom(loc1);
    Register r2 = XRegisterFrom(loc2);
    Register tmp = temps.AcquireSameSizeAs(r1);
    __ Mov(tmp, r2);
    __ Mov(r2, r1);
    __ Mov(r1, tmp);
  } else if (is_fp_reg2 && is_fp_reg1) {
    FPRegister r1 = DRegisterFrom(loc1);
    FPRegister r2 = DRegisterFrom(loc2);
    FPRegister tmp = temps.AcquireSameSizeAs(r1);
    __ Fmov(tmp, r2);
    __ Fmov(r2, r1);
    __ Fmov(r1, tmp);
  } else if (is_slot1 != is_slot2) {
    MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
    Location reg_loc = is_slot1 ? loc2 : loc1;
    CPURegister reg, tmp;
    if (reg_loc.IsFpuRegister()) {
      reg = DRegisterFrom(reg_loc);
      tmp = temps.AcquireD();
    } else {
      reg = XRegisterFrom(reg_loc);
      tmp = temps.AcquireX();
    }
    __ Ldr(tmp, mem);
    __ Str(reg, mem);
    if (reg_loc.IsFpuRegister()) {
      __ Fmov(FPRegister(reg), FPRegister(tmp));
    } else {
      __ Mov(Register(reg), Register(tmp));
    }
  } else if (is_slot1 && is_slot2) {
    MemOperand mem1 = StackOperandFrom(loc1);
    MemOperand mem2 = StackOperandFrom(loc2);
    Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
    Register tmp2 = temps.AcquireSameSizeAs(tmp1);
    __ Ldr(tmp1, mem1);
    __ Ldr(tmp2, mem2);
    __ Str(tmp1, mem2);
    __ Str(tmp2, mem1);
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

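// Loads with acquire semantics. The load-acquire instructions only take a base
// register, so the effective address is first materialized into a temporary.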
void CodeGeneratorARM64::LoadAcquire(Primitive::Type type,
                                     CPURegister dst,
                                     const MemOperand& src) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!src.IsRegisterOffset());
  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), src.offset());
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldar(Register(dst), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

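// Stores with release semantics. As for LoadAcquire, the effective address is first
// materialized into a temporary because the store-release instructions only take a
// base register.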
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsRegisterOffset());
  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  __ Add(temp_base, dst.base(), dst.offset());
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc);
    DCHECK(instruction->IsSuspendCheck()
           || instruction->IsBoundsCheck()
           || instruction->IsNullCheck()
           || instruction->IsDivZeroCheck()
           || !IsLeafMethod());
  }
}

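// Checks that a class is initialized and branches to `slow_path` if it is not. The
// status load needs acquire semantics (or a following barrier) so that later reads of
// the class contents are not reordered before the check.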
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (kUseAcquireRelease) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

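// Emits a DMB of the strength required by `kind`: kStoreStore only needs to order
// writes, kLoadAny only needs to order reads, everything else gets a full barrier.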
void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

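// Emits a suspend check: tests the thread flags and enters the slow path when a
// suspension is requested, otherwise falls through (or branches to `successor`).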
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

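// Shared location setup for the two-input binary operations (add, sub, and, or, xor):
// register inputs (or a constant second input) and a register output that may alias
// an input.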
void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

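// Shared location setup for the shift operations (shl, shr, ushr): only integral types
// are supported, with the shift amount given as a register or a constant.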
Serban Constantinescu02164b32014-11-13 14:05:07 +00001285void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1286 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1287
1288 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1289 Primitive::Type type = instr->GetResultType();
1290 switch (type) {
1291 case Primitive::kPrimInt:
1292 case Primitive::kPrimLong: {
1293 locations->SetInAt(0, Location::RequiresRegister());
1294 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1295 locations->SetOut(Location::RequiresRegister());
1296 break;
1297 }
1298 default:
1299 LOG(FATAL) << "Unexpected shift type " << type;
1300 }
1301}
1302
1303void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1304 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1305
1306 Primitive::Type type = instr->GetType();
1307 switch (type) {
1308 case Primitive::kPrimInt:
1309 case Primitive::kPrimLong: {
1310 Register dst = OutputRegister(instr);
1311 Register lhs = InputRegisterAt(instr, 0);
1312 Operand rhs = InputOperandAt(instr, 1);
1313 if (rhs.IsImmediate()) {
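  // Constant shift amounts are masked to the type width (low 5 bits for int,
  // low 6 bits for long), matching Java shift semantics.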
1314 uint32_t shift_value = (type == Primitive::kPrimInt)
1315 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1316 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1317 if (instr->IsShl()) {
1318 __ Lsl(dst, lhs, shift_value);
1319 } else if (instr->IsShr()) {
1320 __ Asr(dst, lhs, shift_value);
1321 } else {
1322 __ Lsr(dst, lhs, shift_value);
1323 }
1324 } else {
1325 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
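  // For register shifts the CPU takes the shift amount modulo the register
  // size, which already matches Java semantics, so no explicit masking is needed.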
1326
1327 if (instr->IsShl()) {
1328 __ Lsl(dst, lhs, rhs_reg);
1329 } else if (instr->IsShr()) {
1330 __ Asr(dst, lhs, rhs_reg);
1331 } else {
1332 __ Lsr(dst, lhs, rhs_reg);
1333 }
1334 }
1335 break;
1336 }
1337 default:
1338 LOG(FATAL) << "Unexpected shift operation type " << type;
1339 }
1340}
1341
Alexandre Rames5319def2014-10-23 10:03:10 +01001342void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001343 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001344}
1345
1346void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001347 HandleBinaryOp(instruction);
1348}
1349
1350void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1351 HandleBinaryOp(instruction);
1352}
1353
1354void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1355 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001356}
1357
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001358void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1359 LocationSummary* locations =
1360 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1361 locations->SetInAt(0, Location::RequiresRegister());
1362 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1363 locations->SetOut(Location::RequiresRegister());
1364}
1365
1366void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1367 LocationSummary* locations = instruction->GetLocations();
1368 Primitive::Type type = instruction->GetType();
1369 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001370 Location index = locations->InAt(1);
1371 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001372 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001373 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001374
1375 if (index.IsConstant()) {
1376 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001377 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001378 } else {
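  // The element address is obj + data_offset + (index << component_size_shift);
  // compute obj + scaled index into a temp and fold the data offset into the
  // memory operand.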
1379 Register temp = temps.AcquireSameSizeAs(obj);
1380 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1381 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001382 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001383 }
1384
Alexandre Rames67555f72014-11-18 10:55:16 +00001385 codegen_->Load(type, OutputCPURegister(instruction), source);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001386}
1387
Alexandre Rames5319def2014-10-23 10:03:10 +01001388void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1389 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1390 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001391 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001392}
1393
1394void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1395 __ Ldr(OutputRegister(instruction),
1396 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
1397}
1398
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001399void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1400 Primitive::Type value_type = instruction->GetComponentType();
1401 bool is_object = value_type == Primitive::kPrimNot;
1402 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1403 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1404 if (is_object) {
1405 InvokeRuntimeCallingConvention calling_convention;
1406 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1407 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1408 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1409 } else {
1410 locations->SetInAt(0, Location::RequiresRegister());
1411 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1412 locations->SetInAt(2, Location::RequiresRegister());
1413 }
1414}
1415
1416void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1417 Primitive::Type value_type = instruction->GetComponentType();
1418 if (value_type == Primitive::kPrimNot) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001419 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
1420
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001421 } else {
1422 LocationSummary* locations = instruction->GetLocations();
1423 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001424 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001425 Location index = locations->InAt(1);
1426 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001427 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001428 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001429
1430 if (index.IsConstant()) {
1431 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001432 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001433 } else {
1434 Register temp = temps.AcquireSameSizeAs(obj);
1435 Register index_reg = InputRegisterAt(instruction, 1);
1436 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001437 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001438 }
1439
1440 codegen_->Store(value_type, value, destination);
1441 }
1442}
1443
Alexandre Rames67555f72014-11-18 10:55:16 +00001444void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1445 LocationSummary* locations =
1446 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1447 locations->SetInAt(0, Location::RequiresRegister());
1448 locations->SetInAt(1, Location::RequiresRegister());
1449 if (instruction->HasUses()) {
1450 locations->SetOut(Location::SameAsFirstInput());
1451 }
1452}
1453
1454void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001455 LocationSummary* locations = instruction->GetLocations();
1456 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1457 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001458 codegen_->AddSlowPath(slow_path);
1459
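  // The comparison is unsigned: a negative index, seen as a large unsigned
  // value, is also >= the length and branches to the slow path (hs).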
1460 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1461 __ B(slow_path->GetEntryLabel(), hs);
1462}
1463
1464void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1465 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1466 instruction, LocationSummary::kCallOnSlowPath);
1467 locations->SetInAt(0, Location::RequiresRegister());
1468 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001469 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001470}
1471
1472void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001473 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001474 Register obj = InputRegisterAt(instruction, 0);
1475 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001476 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001477
Alexandre Rames3e69f162014-12-10 10:36:50 +00001478 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1479 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001480 codegen_->AddSlowPath(slow_path);
1481
1482 // TODO: Avoid this check if we know `obj` is not null.
1483 __ Cbz(obj, slow_path->GetExitLabel());
1484 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001485 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1486 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001487 __ B(ne, slow_path->GetEntryLabel());
1488 __ Bind(slow_path->GetExitLabel());
1489}
1490
1491void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1492 LocationSummary* locations =
1493 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1494 locations->SetInAt(0, Location::RequiresRegister());
1495 if (check->HasUses()) {
1496 locations->SetOut(Location::SameAsFirstInput());
1497 }
1498}
1499
1500void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1501 // We assume the class is not null.
1502 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1503 check->GetLoadClass(), check, check->GetDexPc(), true);
1504 codegen_->AddSlowPath(slow_path);
1505 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1506}
1507
Serban Constantinescu02164b32014-11-13 14:05:07 +00001508void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001509 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001510 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1511 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001512 switch (in_type) {
1513 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001514 locations->SetInAt(0, Location::RequiresRegister());
1515 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1516 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1517 break;
1518 }
1519 case Primitive::kPrimFloat:
1520 case Primitive::kPrimDouble: {
1521 locations->SetInAt(0, Location::RequiresFpuRegister());
1522 locations->SetInAt(1, Location::RequiresFpuRegister());
1523 locations->SetOut(Location::RequiresRegister());
1524 break;
1525 }
1526 default:
1527 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1528 }
1529}
1530
1531void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1532 Primitive::Type in_type = compare->InputAt(0)->GetType();
1533
1534 // 0 if: left == right
1535 // 1 if: left > right
1536 // -1 if: left < right
1537 switch (in_type) {
1538 case Primitive::kPrimLong: {
1539 Register result = OutputRegister(compare);
1540 Register left = InputRegisterAt(compare, 0);
1541 Operand right = InputOperandAt(compare, 1);
1542
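  // result = (left != right) ? 1 : 0, negated to -1 when left < right.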
1543 __ Cmp(left, right);
1544 __ Cset(result, ne);
1545 __ Cneg(result, result, lt);
1546 break;
1547 }
1548 case Primitive::kPrimFloat:
1549 case Primitive::kPrimDouble: {
1550 Register result = OutputRegister(compare);
1551 FPRegister left = InputFPRegisterAt(compare, 0);
1552 FPRegister right = InputFPRegisterAt(compare, 1);
1553
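  // With gt bias an unordered compare (NaN input) yields 1, with lt bias -1.
  // Cset gives 0/1 and Csetm gives 0/-1; the final Cneg flips the sign for the
  // ordered less-than (gt bias) or greater-than (lt bias) case.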
1554 __ Fcmp(left, right);
1555 if (compare->IsGtBias()) {
1556 __ Cset(result, ne);
1557 } else {
1558 __ Csetm(result, ne);
1559 }
1560 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001561 break;
1562 }
1563 default:
1564 LOG(FATAL) << "Unimplemented compare type " << in_type;
1565 }
1566}
1567
1568void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1569 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1570 locations->SetInAt(0, Location::RequiresRegister());
1571 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1572 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001573 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001574 }
1575}
1576
1577void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1578 if (!instruction->NeedsMaterialization()) {
1579 return;
1580 }
1581
1582 LocationSummary* locations = instruction->GetLocations();
1583 Register lhs = InputRegisterAt(instruction, 0);
1584 Operand rhs = InputOperandAt(instruction, 1);
1585 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1586 Condition cond = ARM64Condition(instruction->GetCondition());
1587
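  // Materialize the condition as 0 or 1 in the output register.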
1588 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001589 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001590}
1591
1592#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1593 M(Equal) \
1594 M(NotEqual) \
1595 M(LessThan) \
1596 M(LessThanOrEqual) \
1597 M(GreaterThan) \
1598 M(GreaterThanOrEqual)
1599#define DEFINE_CONDITION_VISITORS(Name) \
1600void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1601void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1602FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001603#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001604#undef FOR_EACH_CONDITION_INSTRUCTION
1605
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001606void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1607 LocationSummary* locations =
1608 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1609 switch (div->GetResultType()) {
1610 case Primitive::kPrimInt:
1611 case Primitive::kPrimLong:
1612 locations->SetInAt(0, Location::RequiresRegister());
1613 locations->SetInAt(1, Location::RequiresRegister());
1614 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1615 break;
1616
1617 case Primitive::kPrimFloat:
1618 case Primitive::kPrimDouble:
1619 locations->SetInAt(0, Location::RequiresFpuRegister());
1620 locations->SetInAt(1, Location::RequiresFpuRegister());
1621 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1622 break;
1623
1624 default:
1625 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1626 }
1627}
1628
1629void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1630 Primitive::Type type = div->GetResultType();
1631 switch (type) {
1632 case Primitive::kPrimInt:
1633 case Primitive::kPrimLong:
1634 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1635 break;
1636
1637 case Primitive::kPrimFloat:
1638 case Primitive::kPrimDouble:
1639 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1640 break;
1641
1642 default:
1643 LOG(FATAL) << "Unexpected div type " << type;
1644 }
1645}
1646
Alexandre Rames67555f72014-11-18 10:55:16 +00001647void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1648 LocationSummary* locations =
1649 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1650 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1651 if (instruction->HasUses()) {
1652 locations->SetOut(Location::SameAsFirstInput());
1653 }
1654}
1655
1656void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1657 SlowPathCodeARM64* slow_path =
1658 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1659 codegen_->AddSlowPath(slow_path);
1660 Location value = instruction->GetLocations()->InAt(0);
1661
Alexandre Rames3e69f162014-12-10 10:36:50 +00001662 Primitive::Type type = instruction->GetType();
1663
1664 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1665 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1666 return;
1667 }
1668
Alexandre Rames67555f72014-11-18 10:55:16 +00001669 if (value.IsConstant()) {
1670 int64_t divisor = Int64ConstantFrom(value);
1671 if (divisor == 0) {
1672 __ B(slow_path->GetEntryLabel());
1673 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001674 // A division by a non-zero constant is valid. We don't need to perform
1675 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001676 }
1677 } else {
1678 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1679 }
1680}
1681
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001682void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1683 LocationSummary* locations =
1684 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1685 locations->SetOut(Location::ConstantLocation(constant));
1686}
1687
1688void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1689 UNUSED(constant);
1690 // Will be generated at use site.
1691}
1692
Alexandre Rames5319def2014-10-23 10:03:10 +01001693void LocationsBuilderARM64::VisitExit(HExit* exit) {
1694 exit->SetLocations(nullptr);
1695}
1696
1697void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001698 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001699 if (kIsDebugBuild) {
1700 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001701 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001702 }
1703}
1704
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001705void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1706 LocationSummary* locations =
1707 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1708 locations->SetOut(Location::ConstantLocation(constant));
1709}
1710
1711void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1712 UNUSED(constant);
1713 // Will be generated at use site.
1714}
1715
Alexandre Rames5319def2014-10-23 10:03:10 +01001716void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1717 got->SetLocations(nullptr);
1718}
1719
1720void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1721 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001722 DCHECK(!successor->IsExitBlock());
1723 HBasicBlock* block = got->GetBlock();
1724 HInstruction* previous = got->GetPrevious();
1725 HLoopInformation* info = block->GetLoopInformation();
1726
1727 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1728 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1729 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1730 return;
1731 }
1732 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1733 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1734 }
1735 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001736 __ B(codegen_->GetLabelOf(successor));
1737 }
1738}
1739
1740void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1741 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1742 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001743 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001744 locations->SetInAt(0, Location::RequiresRegister());
1745 }
1746}
1747
1748void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1749 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001750 HCondition* condition = cond->AsCondition();
1751 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1752 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1753
Serban Constantinescu02164b32014-11-13 14:05:07 +00001754 if (cond->IsIntConstant()) {
1755 int32_t cond_value = cond->AsIntConstant()->GetValue();
1756 if (cond_value == 1) {
1757 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1758 __ B(true_target);
1759 }
1760 return;
1761 } else {
1762 DCHECK_EQ(cond_value, 0);
1763 }
1764 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001765 // The condition instruction has been materialized, compare the output to 0.
1766 Location cond_val = if_instr->GetLocations()->InAt(0);
1767 DCHECK(cond_val.IsRegister());
1768 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001769 } else {
1770 // The condition instruction has not been materialized, use its inputs as
1771 // the comparison and its condition as the branch condition.
1772 Register lhs = InputRegisterAt(condition, 0);
1773 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001774 Condition arm64_cond = ARM64Condition(condition->GetCondition());
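  // Prefer cbz/cbnz over cmp/b.cond when testing for (in)equality with zero.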
1775 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1776 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001777 __ Cbz(lhs, true_target);
1778 } else {
1779 __ Cbnz(lhs, true_target);
1780 }
1781 } else {
1782 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001783 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001784 }
1785 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001786 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1787 __ B(false_target);
1788 }
1789}
1790
1791void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001792 LocationSummary* locations =
1793 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001794 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001795 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001796}
1797
1798void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001799 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001800
1801 if (instruction->IsVolatile()) {
1802 if (kUseAcquireRelease) {
1803 codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
1804 } else {
1805 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1806 // For IRIW sequential consistency kLoadAny is not sufficient.
1807 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1808 }
1809 } else {
1810 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1811 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001812}
1813
1814void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001815 LocationSummary* locations =
1816 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001817 locations->SetInAt(0, Location::RequiresRegister());
1818 locations->SetInAt(1, Location::RequiresRegister());
1819}
1820
1821void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001822 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001823 CPURegister value = InputCPURegisterAt(instruction, 1);
1824 Offset offset = instruction->GetFieldOffset();
1825 Primitive::Type field_type = instruction->GetFieldType();
1826
1827 if (instruction->IsVolatile()) {
1828 if (kUseAcquireRelease) {
1829 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1830 } else {
1831 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1832 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1833 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1834 }
1835 } else {
1836 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1837 }
1838
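  // If a reference was stored, mark the holder's GC card (write barrier).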
1839 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001840 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001841 }
1842}
1843
Alexandre Rames67555f72014-11-18 10:55:16 +00001844void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1845 LocationSummary::CallKind call_kind =
1846 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1847 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1848 locations->SetInAt(0, Location::RequiresRegister());
1849 locations->SetInAt(1, Location::RequiresRegister());
1850 locations->SetOut(Location::RequiresRegister(), true); // The output does overlap inputs.
1851}
1852
1853void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1854 LocationSummary* locations = instruction->GetLocations();
1855 Register obj = InputRegisterAt(instruction, 0);
1856 Register cls = InputRegisterAt(instruction, 1);
1857 Register out = OutputRegister(instruction);
1858
1859 vixl::Label done;
1860
1861 // Return 0 if `obj` is null.
1862 // TODO: Avoid this check if we know `obj` is not null.
1863 __ Mov(out, 0);
1864 __ Cbz(obj, &done);
1865
1866 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001867 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001868 __ Cmp(out, cls);
1869 if (instruction->IsClassFinal()) {
1870 // Classes must be equal for the instanceof to succeed.
1871 __ Cset(out, eq);
1872 } else {
1873 // If the classes are not equal, we go into a slow path.
1874 DCHECK(locations->OnlyCallsOnSlowPath());
1875 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001876 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1877 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001878 codegen_->AddSlowPath(slow_path);
1879 __ B(ne, slow_path->GetEntryLabel());
1880 __ Mov(out, 1);
1881 __ Bind(slow_path->GetExitLabel());
1882 }
1883
1884 __ Bind(&done);
1885}
1886
Alexandre Rames5319def2014-10-23 10:03:10 +01001887void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1888 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1889 locations->SetOut(Location::ConstantLocation(constant));
1890}
1891
1892void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1893 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001894 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001895}
1896
Alexandre Rames5319def2014-10-23 10:03:10 +01001897void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1898 LocationSummary* locations =
1899 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1900 locations->AddTemp(LocationFrom(x0));
1901
1902 InvokeDexCallingConventionVisitor calling_convention_visitor;
1903 for (size_t i = 0; i < invoke->InputCount(); i++) {
1904 HInstruction* input = invoke->InputAt(i);
1905 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1906 }
1907
1908 Primitive::Type return_type = invoke->GetType();
1909 if (return_type != Primitive::kPrimVoid) {
1910 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1911 }
1912}
1913
Alexandre Rames67555f72014-11-18 10:55:16 +00001914void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1915 HandleInvoke(invoke);
1916}
1917
1918void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1919 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1920 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1921 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1922 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1923 Location receiver = invoke->GetLocations()->InAt(0);
1924 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001925 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001926
1927 // The register ip1 is required to be used for the hidden argument in
1928 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1929 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1930 scratch_scope.Exclude(ip1);
1931 __ Mov(ip1, invoke->GetDexMethodIndex());
1932
1933 // temp = object->GetClass();
1934 if (receiver.IsStackSlot()) {
1935 __ Ldr(temp, StackOperandFrom(receiver));
1936 __ Ldr(temp, HeapOperand(temp, class_offset));
1937 } else {
1938 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1939 }
1940 // temp = temp->GetImtEntryAt(method_offset);
1941 __ Ldr(temp, HeapOperand(temp, method_offset));
1942 // lr = temp->GetEntryPoint();
1943 __ Ldr(lr, HeapOperand(temp, entry_point));
1944 // lr();
1945 __ Blr(lr);
1946 DCHECK(!codegen_->IsLeafMethod());
1947 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1948}
1949
1950void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1951 HandleInvoke(invoke);
1952}
1953
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001954void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001955 HandleInvoke(invoke);
1956}
1957
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001958void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001959 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1960 // Make sure that ArtMethod* is passed in W0 as per the calling convention.
1961 DCHECK(temp.Is(w0));
1962 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
1963 invoke->GetIndexInDexCache() * kHeapRefSize;
1964
1965 // TODO: Implement all kinds of calls:
1966 // 1) boot -> boot
1967 // 2) app -> boot
1968 // 3) app -> app
1969 //
1970 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1971
1972 // temp = method;
Alexandre Rames67555f72014-11-18 10:55:16 +00001973 codegen_->LoadCurrentMethod(temp);
Nicolas Geoffray4e44c822014-12-17 12:25:12 +00001974 // temp = temp->dex_cache_resolved_methods_;
1975 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1976 // temp = temp[index_in_cache];
1977 __ Ldr(temp, HeapOperand(temp, index_in_cache));
Alexandre Rames5319def2014-10-23 10:03:10 +01001978 // lr = temp->entry_point_from_quick_compiled_code_;
Serban Constantinescu02164b32014-11-13 14:05:07 +00001979 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1980 kArm64WordSize)));
Alexandre Rames5319def2014-10-23 10:03:10 +01001981 // lr();
1982 __ Blr(lr);
1983
1984 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1985 DCHECK(!codegen_->IsLeafMethod());
1986}
1987
1988void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1989 LocationSummary* locations = invoke->GetLocations();
1990 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001991 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01001992 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
1993 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1994 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001995 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01001996
1997 // temp = object->GetClass();
1998 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001999 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2000 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002001 } else {
2002 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002003 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002004 }
2005 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002006 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002007 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002008 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002009 // lr();
2010 __ Blr(lr);
2011 DCHECK(!codegen_->IsLeafMethod());
2012 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2013}
2014
Alexandre Rames67555f72014-11-18 10:55:16 +00002015void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2016 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2017 : LocationSummary::kNoCall;
2018 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2019 locations->SetOut(Location::RequiresRegister());
2020}
2021
2022void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2023 Register out = OutputRegister(cls);
2024 if (cls->IsReferrersClass()) {
2025 DCHECK(!cls->CanCallRuntime());
2026 DCHECK(!cls->MustGenerateClinitCheck());
2027 codegen_->LoadCurrentMethod(out);
2028 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2029 } else {
2030 DCHECK(cls->CanCallRuntime());
2031 codegen_->LoadCurrentMethod(out);
2032 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002033 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002034
2035 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2036 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2037 codegen_->AddSlowPath(slow_path);
2038 __ Cbz(out, slow_path->GetEntryLabel());
2039 if (cls->MustGenerateClinitCheck()) {
2040 GenerateClassInitializationCheck(slow_path, out);
2041 } else {
2042 __ Bind(slow_path->GetExitLabel());
2043 }
2044 }
2045}
2046
2047void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2048 LocationSummary* locations =
2049 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2050 locations->SetOut(Location::RequiresRegister());
2051}
2052
2053void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2054 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2055 __ Ldr(OutputRegister(instruction), exception);
2056 __ Str(wzr, exception);
2057}
2058
Alexandre Rames5319def2014-10-23 10:03:10 +01002059void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2060 load->SetLocations(nullptr);
2061}
2062
2063void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2064 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002065 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002066}
2067
Alexandre Rames67555f72014-11-18 10:55:16 +00002068void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2069 LocationSummary* locations =
2070 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2071 locations->SetOut(Location::RequiresRegister());
2072}
2073
2074void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2075 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2076 codegen_->AddSlowPath(slow_path);
2077
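  // Walk current method -> declaring class -> dex cache strings -> string.
  // If the cache entry is still null, resolve the string on the slow path.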
2078 Register out = OutputRegister(load);
2079 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002080 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2081 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002082 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002083 __ Cbz(out, slow_path->GetEntryLabel());
2084 __ Bind(slow_path->GetExitLabel());
2085}
2086
Alexandre Rames5319def2014-10-23 10:03:10 +01002087void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2088 local->SetLocations(nullptr);
2089}
2090
2091void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2092 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2093}
2094
2095void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2096 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2097 locations->SetOut(Location::ConstantLocation(constant));
2098}
2099
2100void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2101 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002102 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002103}
2104
Alexandre Rames67555f72014-11-18 10:55:16 +00002105void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2106 LocationSummary* locations =
2107 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2108 InvokeRuntimeCallingConvention calling_convention;
2109 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2110}
2111
2112void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2113 codegen_->InvokeRuntime(instruction->IsEnter()
2114 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2115 instruction,
2116 instruction->GetDexPc());
2117}
2118
Alexandre Rames42d641b2014-10-27 14:00:51 +00002119void LocationsBuilderARM64::VisitMul(HMul* mul) {
2120 LocationSummary* locations =
2121 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2122 switch (mul->GetResultType()) {
2123 case Primitive::kPrimInt:
2124 case Primitive::kPrimLong:
2125 locations->SetInAt(0, Location::RequiresRegister());
2126 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002127 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002128 break;
2129
2130 case Primitive::kPrimFloat:
2131 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002132 locations->SetInAt(0, Location::RequiresFpuRegister());
2133 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002134 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002135 break;
2136
2137 default:
2138 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2139 }
2140}
2141
2142void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2143 switch (mul->GetResultType()) {
2144 case Primitive::kPrimInt:
2145 case Primitive::kPrimLong:
2146 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2147 break;
2148
2149 case Primitive::kPrimFloat:
2150 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002151 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002152 break;
2153
2154 default:
2155 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2156 }
2157}
2158
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002159void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2160 LocationSummary* locations =
2161 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2162 switch (neg->GetResultType()) {
2163 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002164 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002165 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002166 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002167 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002168
2169 case Primitive::kPrimFloat:
2170 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002171 locations->SetInAt(0, Location::RequiresFpuRegister());
2172 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002173 break;
2174
2175 default:
2176 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2177 }
2178}
2179
2180void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2181 switch (neg->GetResultType()) {
2182 case Primitive::kPrimInt:
2183 case Primitive::kPrimLong:
2184 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2185 break;
2186
2187 case Primitive::kPrimFloat:
2188 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002189 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002190 break;
2191
2192 default:
2193 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2194 }
2195}
2196
2197void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2198 LocationSummary* locations =
2199 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2200 InvokeRuntimeCallingConvention calling_convention;
2201 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2202 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2203 locations->SetOut(LocationFrom(x0));
2204 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(2)));
2205}
2206
2207void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2208 LocationSummary* locations = instruction->GetLocations();
2209 InvokeRuntimeCallingConvention calling_convention;
2210 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2211 DCHECK(type_index.Is(w0));
2212 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2213 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002214 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002215 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002216 codegen_->InvokeRuntime(
2217 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002218}
2219
Alexandre Rames5319def2014-10-23 10:03:10 +01002220void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2221 LocationSummary* locations =
2222 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2223 InvokeRuntimeCallingConvention calling_convention;
2224 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2225 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2226 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
2227}
2228
2229void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2230 LocationSummary* locations = instruction->GetLocations();
2231 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2232 DCHECK(type_index.Is(w0));
2233 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2234 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002235 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002236 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002237 codegen_->InvokeRuntime(
2238 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002239}
2240
2241void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2242 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002243 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002244 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002245}
2246
2247void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2248 switch (instruction->InputAt(0)->GetType()) {
2249 case Primitive::kPrimBoolean:
2250 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
2251 break;
2252
2253 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002254 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002255 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002256 break;
2257
2258 default:
2259 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2260 }
2261}
2262
2263void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2264 LocationSummary* locations =
2265 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2266 locations->SetInAt(0, Location::RequiresRegister());
2267 if (instruction->HasUses()) {
2268 locations->SetOut(Location::SameAsFirstInput());
2269 }
2270}
2271
2272void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2273 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2274 codegen_->AddSlowPath(slow_path);
2275
2276 LocationSummary* locations = instruction->GetLocations();
2277 Location obj = locations->InAt(0);
2278 if (obj.IsRegister()) {
2279 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
2280 } else {
2281 DCHECK(obj.IsConstant()) << obj;
2282 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2283 __ B(slow_path->GetEntryLabel());
2284 }
2285}
2286
Alexandre Rames67555f72014-11-18 10:55:16 +00002287void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2288 HandleBinaryOp(instruction);
2289}
2290
2291void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2292 HandleBinaryOp(instruction);
2293}
2294
Alexandre Rames3e69f162014-12-10 10:36:50 +00002295void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2296 LOG(FATAL) << "Unreachable";
2297}
2298
2299void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2300 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2301}
2302
Alexandre Rames5319def2014-10-23 10:03:10 +01002303void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2304 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2305 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2306 if (location.IsStackSlot()) {
2307 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2308 } else if (location.IsDoubleStackSlot()) {
2309 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2310 }
2311 locations->SetOut(location);
2312}
2313
2314void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2315 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002316 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002317}
2318
2319void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2320 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2321 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2322 locations->SetInAt(i, Location::Any());
2323 }
2324 locations->SetOut(Location::Any());
2325}
2326
2327void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002328 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002329 LOG(FATAL) << "Unreachable";
2330}
2331
Serban Constantinescu02164b32014-11-13 14:05:07 +00002332void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002333 Primitive::Type type = rem->GetResultType();
2334 LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
2335 : LocationSummary::kNoCall;
2336 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2337
2338 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002339 case Primitive::kPrimInt:
2340 case Primitive::kPrimLong:
2341 locations->SetInAt(0, Location::RequiresRegister());
2342 locations->SetInAt(1, Location::RequiresRegister());
2343 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2344 break;
2345
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002346 case Primitive::kPrimFloat:
2347 case Primitive::kPrimDouble: {
2348 InvokeRuntimeCallingConvention calling_convention;
2349 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2350 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2351 locations->SetOut(calling_convention.GetReturnLocation(type));
2352
2353 break;
2354 }
2355
Serban Constantinescu02164b32014-11-13 14:05:07 +00002356 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002357 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00002358 }
2359}
2360
2361void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2362 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002363
Serban Constantinescu02164b32014-11-13 14:05:07 +00002364 switch (type) {
2365 case Primitive::kPrimInt:
2366 case Primitive::kPrimLong: {
2367 UseScratchRegisterScope temps(GetVIXLAssembler());
2368 Register dividend = InputRegisterAt(rem, 0);
2369 Register divisor = InputRegisterAt(rem, 1);
2370 Register output = OutputRegister(rem);
2371 Register temp = temps.AcquireSameSizeAs(output);
2372
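  // output = dividend - (dividend / divisor) * divisor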
2373 __ Sdiv(temp, dividend, divisor);
2374 __ Msub(output, temp, divisor, dividend);
2375 break;
2376 }
2377
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002378 case Primitive::kPrimFloat:
2379 case Primitive::kPrimDouble: {
2380 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
2381 : QUICK_ENTRY_POINT(pFmod);
2382 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
2383 break;
2384 }
2385
Serban Constantinescu02164b32014-11-13 14:05:07 +00002386 default:
2387 LOG(FATAL) << "Unexpected rem type " << type;
2388 }
2389}
2390
Alexandre Rames5319def2014-10-23 10:03:10 +01002391void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
2392 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2393 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002394 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01002395}
2396
2397void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002398 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002399 codegen_->GenerateFrameExit();
Alexandre Rames3e69f162014-12-10 10:36:50 +00002400 __ Ret();
Alexandre Rames5319def2014-10-23 10:03:10 +01002401}
2402
2403void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
2404 instruction->SetLocations(nullptr);
2405}
2406
2407void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002408 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002409 codegen_->GenerateFrameExit();
Alexandre Rames3e69f162014-12-10 10:36:50 +00002410 __ Ret();
Alexandre Rames5319def2014-10-23 10:03:10 +01002411}
2412
Serban Constantinescu02164b32014-11-13 14:05:07 +00002413void LocationsBuilderARM64::VisitShl(HShl* shl) {
2414 HandleShift(shl);
2415}
2416
2417void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
2418 HandleShift(shl);
2419}
2420
2421void LocationsBuilderARM64::VisitShr(HShr* shr) {
2422 HandleShift(shr);
2423}
2424
2425void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
2426 HandleShift(shr);
2427}
2428
Alexandre Rames5319def2014-10-23 10:03:10 +01002429void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
2430 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
2431 Primitive::Type field_type = store->InputAt(1)->GetType();
2432 switch (field_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002433 case Primitive::kPrimNot:
Alexandre Rames5319def2014-10-23 10:03:10 +01002434 case Primitive::kPrimBoolean:
2435 case Primitive::kPrimByte:
2436 case Primitive::kPrimChar:
2437 case Primitive::kPrimShort:
2438 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002439 case Primitive::kPrimFloat:
Alexandre Rames5319def2014-10-23 10:03:10 +01002440 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
2441 break;
2442
2443 case Primitive::kPrimLong:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002444 case Primitive::kPrimDouble:
Alexandre Rames5319def2014-10-23 10:03:10 +01002445 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
2446 break;
2447
2448 default:
2449 LOG(FATAL) << "Unimplemented local type " << field_type;
2450 }
2451}
2452
void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

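// Volatile static field loads use a load-acquire when kUseAcquireRelease is enabled, and
// otherwise a plain load followed by an explicit memory barrier. Non-volatile loads need neither.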
void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

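// Volatile static field stores use a store-release when kUseAcquireRelease is enabled, and
// otherwise bracket a plain store with kAnyStore/kAnyAny barriers. Stores of references also
// mark the GC card for the holding class.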
void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

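// An explicit suspend check is only emitted when it is not already subsumed by the check
// generated for a loop back edge or for the goto following the entry block.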
void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

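// The exception object is passed to the runtime in the first register of the runtime calling
// convention.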
void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

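// Exception delivery is delegated entirely to the pDeliverException runtime entry point.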
void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

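// Integral-to-integral conversions sign-extend (Sbfx) or zero-extend (Ubfx, when char is
// involved) from the narrower width; integral-to-FP uses Scvtf, FP-to-integral uses Fcvtzs,
// and FP-to-FP uses Fcvt.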
void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art