blob: 7e9cdac394424042c207f597ec54947a62789ccd [file] [log] [blame]
Alexandre Rames5319def2014-10-23 10:03:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm64.h"
18
Serban Constantinescu579885a2015-02-22 20:51:33 +000019#include "arch/arm64/instruction_set_features_arm64.h"
Andreas Gampe878d58c2015-01-15 23:24:00 -080020#include "common_arm64.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010021#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe1cc7dba2014-12-17 18:43:01 -080022#include "entrypoints/quick/quick_entrypoints_enum.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010023#include "gc/accounting/card_table.h"
Andreas Gampe878d58c2015-01-15 23:24:00 -080024#include "intrinsics.h"
25#include "intrinsics_arm64.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010026#include "mirror/array-inl.h"
27#include "mirror/art_method.h"
28#include "mirror/class.h"
Calin Juravlecd6dffe2015-01-08 17:35:35 +000029#include "offsets.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010030#include "thread.h"
31#include "utils/arm64/assembler_arm64.h"
32#include "utils/assembler.h"
33#include "utils/stack_checks.h"
34
35
36using namespace vixl; // NOLINT(build/namespaces)
37
38#ifdef __
39#error "ARM64 Codegen VIXL macro-assembler macro already defined."
40#endif
41
Alexandre Rames5319def2014-10-23 10:03:10 +010042namespace art {
43
44namespace arm64 {
45
Andreas Gampe878d58c2015-01-15 23:24:00 -080046using helpers::CPURegisterFrom;
47using helpers::DRegisterFrom;
48using helpers::FPRegisterFrom;
49using helpers::HeapOperand;
50using helpers::HeapOperandFrom;
51using helpers::InputCPURegisterAt;
52using helpers::InputFPRegisterAt;
53using helpers::InputRegisterAt;
54using helpers::InputOperandAt;
55using helpers::Int64ConstantFrom;
Andreas Gampe878d58c2015-01-15 23:24:00 -080056using helpers::LocationFrom;
57using helpers::OperandFromMemOperand;
58using helpers::OutputCPURegister;
59using helpers::OutputFPRegister;
60using helpers::OutputRegister;
61using helpers::RegisterFrom;
62using helpers::StackOperandFrom;
63using helpers::VIXLRegCodeFromART;
64using helpers::WRegisterFrom;
65using helpers::XRegisterFrom;
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +000066using helpers::ARM64EncodableConstantOrRegister;
Zheng Xuda403092015-04-24 17:35:39 +080067using helpers::ArtVixlRegCodeCoherentForRegSet;
Andreas Gampe878d58c2015-01-15 23:24:00 -080068
Alexandre Rames5319def2014-10-23 10:03:10 +010069static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
70static constexpr int kCurrentMethodStackOffset = 0;
71
Alexandre Rames5319def2014-10-23 10:03:10 +010072inline Condition ARM64Condition(IfCondition cond) {
73 switch (cond) {
74 case kCondEQ: return eq;
75 case kCondNE: return ne;
76 case kCondLT: return lt;
77 case kCondLE: return le;
78 case kCondGT: return gt;
79 case kCondGE: return ge;
80 default:
81 LOG(FATAL) << "Unknown if condition";
82 }
83 return nv; // Unreachable.
84}
85
Alexandre Ramesa89086e2014-11-07 17:13:25 +000086Location ARM64ReturnLocation(Primitive::Type return_type) {
87 DCHECK_NE(return_type, Primitive::kPrimVoid);
88 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
89 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
90 // but we use the exact registers for clarity.
91 if (return_type == Primitive::kPrimFloat) {
92 return LocationFrom(s0);
93 } else if (return_type == Primitive::kPrimDouble) {
94 return LocationFrom(d0);
95 } else if (return_type == Primitive::kPrimLong) {
96 return LocationFrom(x0);
97 } else {
98 return LocationFrom(w0);
99 }
100}
101
Alexandre Rames5319def2014-10-23 10:03:10 +0100102Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000103 return ARM64ReturnLocation(return_type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100104}
105
Alexandre Rames67555f72014-11-18 10:55:16 +0000106#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
107#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
Alexandre Rames5319def2014-10-23 10:03:10 +0100108
Zheng Xuda403092015-04-24 17:35:39 +0800109// Calculate memory accessing operand for save/restore live registers.
110static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
111 RegisterSet* register_set,
112 int64_t spill_offset,
113 bool is_save) {
114 DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
115 codegen->GetNumberOfCoreRegisters(),
116 register_set->GetFloatingPointRegisters(),
117 codegen->GetNumberOfFloatingPointRegisters()));
118
119 CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
120 register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
121 CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
122 register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));
123
124 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
125 UseScratchRegisterScope temps(masm);
126
127 Register base = masm->StackPointer();
128 int64_t core_spill_size = core_list.TotalSizeInBytes();
129 int64_t fp_spill_size = fp_list.TotalSizeInBytes();
130 int64_t reg_size = kXRegSizeInBytes;
131 int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
132 uint32_t ls_access_size = WhichPowerOf2(reg_size);
133 if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
134 !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
135 // If the offset does not fit in the instruction's immediate field, use an alternate register
136 // to compute the base address(float point registers spill base address).
137 Register new_base = temps.AcquireSameSizeAs(base);
138 __ Add(new_base, base, Operand(spill_offset + core_spill_size));
139 base = new_base;
140 spill_offset = -core_spill_size;
141 int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
142 DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
143 DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
144 }
145
146 if (is_save) {
147 __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
148 __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
149 } else {
150 __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
151 __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
152 }
153}
154
155void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
156 RegisterSet* register_set = locations->GetLiveRegisters();
157 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
158 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
159 if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
160 // If the register holds an object, update the stack mask.
161 if (locations->RegisterContainsObject(i)) {
162 locations->SetStackBit(stack_offset / kVRegSize);
163 }
164 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
165 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
166 saved_core_stack_offsets_[i] = stack_offset;
167 stack_offset += kXRegSizeInBytes;
168 }
169 }
170
171 for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
172 if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
173 register_set->ContainsFloatingPointRegister(i)) {
174 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
175 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
176 saved_fpu_stack_offsets_[i] = stack_offset;
177 stack_offset += kDRegSizeInBytes;
178 }
179 }
180
181 SaveRestoreLiveRegistersHelper(codegen, register_set,
182 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
183}
184
185void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
186 RegisterSet* register_set = locations->GetLiveRegisters();
187 SaveRestoreLiveRegistersHelper(codegen, register_set,
188 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
189}
190
Alexandre Rames5319def2014-10-23 10:03:10 +0100191class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
192 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000193 BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
194 Location index_location,
195 Location length_location)
196 : instruction_(instruction),
197 index_location_(index_location),
198 length_location_(length_location) {}
199
Alexandre Rames5319def2014-10-23 10:03:10 +0100200
Alexandre Rames67555f72014-11-18 10:55:16 +0000201 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000202 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100203 __ Bind(GetEntryLabel());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000204 // We're moving two locations to locations that could overlap, so we need a parallel
205 // move resolver.
206 InvokeRuntimeCallingConvention calling_convention;
207 codegen->EmitParallelMoves(
Nicolas Geoffray90218252015-04-15 11:56:51 +0100208 index_location_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
209 length_location_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000210 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000211 QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800212 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100213 }
214
215 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000216 HBoundsCheck* const instruction_;
217 const Location index_location_;
218 const Location length_location_;
219
Alexandre Rames5319def2014-10-23 10:03:10 +0100220 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
221};
222
Alexandre Rames67555f72014-11-18 10:55:16 +0000223class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
224 public:
225 explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}
226
227 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
228 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
229 __ Bind(GetEntryLabel());
230 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000231 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800232 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000233 }
234
235 private:
236 HDivZeroCheck* const instruction_;
237 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
238};
239
240class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
241 public:
242 LoadClassSlowPathARM64(HLoadClass* cls,
243 HInstruction* at,
244 uint32_t dex_pc,
245 bool do_clinit)
246 : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
247 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
248 }
249
250 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
251 LocationSummary* locations = at_->GetLocations();
252 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
253
254 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000255 SaveLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000256
257 InvokeRuntimeCallingConvention calling_convention;
258 __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
259 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
260 int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
261 : QUICK_ENTRY_POINT(pInitializeType);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000262 arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800263 if (do_clinit_) {
264 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
265 } else {
266 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
267 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000268
269 // Move the class to the desired location.
270 Location out = locations->Out();
271 if (out.IsValid()) {
272 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
273 Primitive::Type type = at_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000274 arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000275 }
276
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000277 RestoreLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000278 __ B(GetExitLabel());
279 }
280
281 private:
282 // The class this slow path will load.
283 HLoadClass* const cls_;
284
285 // The instruction where this slow path is happening.
286 // (Might be the load class or an initialization check).
287 HInstruction* const at_;
288
289 // The dex PC of `at_`.
290 const uint32_t dex_pc_;
291
292 // Whether to initialize the class.
293 const bool do_clinit_;
294
295 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
296};
297
298class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
299 public:
300 explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}
301
302 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
303 LocationSummary* locations = instruction_->GetLocations();
304 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
305 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
306
307 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000308 SaveLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000309
310 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800311 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
312 __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +0000313 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000314 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800315 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000316 Primitive::Type type = instruction_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000317 arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000318
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000319 RestoreLiveRegisters(codegen, locations);
Alexandre Rames67555f72014-11-18 10:55:16 +0000320 __ B(GetExitLabel());
321 }
322
323 private:
324 HLoadString* const instruction_;
325
326 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
327};
328
Alexandre Rames5319def2014-10-23 10:03:10 +0100329class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
330 public:
331 explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}
332
Alexandre Rames67555f72014-11-18 10:55:16 +0000333 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
334 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100335 __ Bind(GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000336 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000337 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800338 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100339 }
340
341 private:
342 HNullCheck* const instruction_;
343
344 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
345};
346
347class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
348 public:
349 explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
350 HBasicBlock* successor)
351 : instruction_(instruction), successor_(successor) {}
352
Alexandre Rames67555f72014-11-18 10:55:16 +0000353 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
354 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100355 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000356 SaveLiveRegisters(codegen, instruction_->GetLocations());
Alexandre Rames67555f72014-11-18 10:55:16 +0000357 arm64_codegen->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000358 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800359 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000360 RestoreLiveRegisters(codegen, instruction_->GetLocations());
Alexandre Rames67555f72014-11-18 10:55:16 +0000361 if (successor_ == nullptr) {
362 __ B(GetReturnLabel());
363 } else {
364 __ B(arm64_codegen->GetLabelOf(successor_));
365 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100366 }
367
368 vixl::Label* GetReturnLabel() {
369 DCHECK(successor_ == nullptr);
370 return &return_label_;
371 }
372
Alexandre Rames5319def2014-10-23 10:03:10 +0100373 private:
374 HSuspendCheck* const instruction_;
375 // If not null, the block to branch to after the suspend check.
376 HBasicBlock* const successor_;
377
378 // If `successor_` is null, the label to branch to after the suspend check.
379 vixl::Label return_label_;
380
381 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
382};
383
Alexandre Rames67555f72014-11-18 10:55:16 +0000384class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
385 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000386 TypeCheckSlowPathARM64(HInstruction* instruction,
387 Location class_to_check,
388 Location object_class,
389 uint32_t dex_pc)
390 : instruction_(instruction),
391 class_to_check_(class_to_check),
392 object_class_(object_class),
393 dex_pc_(dex_pc) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000394
395 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000396 LocationSummary* locations = instruction_->GetLocations();
397 DCHECK(instruction_->IsCheckCast()
398 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
399 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
400
Alexandre Rames67555f72014-11-18 10:55:16 +0000401 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000402 SaveLiveRegisters(codegen, locations);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000403
404 // We're moving two locations to locations that could overlap, so we need a parallel
405 // move resolver.
406 InvokeRuntimeCallingConvention calling_convention;
407 codegen->EmitParallelMoves(
Nicolas Geoffray90218252015-04-15 11:56:51 +0100408 class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
409 object_class_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000410
411 if (instruction_->IsInstanceOf()) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000412 arm64_codegen->InvokeRuntime(
413 QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000414 Primitive::Type ret_type = instruction_->GetType();
415 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
416 arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800417 CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
418 const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000419 } else {
420 DCHECK(instruction_->IsCheckCast());
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000421 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800422 CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000423 }
424
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000425 RestoreLiveRegisters(codegen, locations);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000426 __ B(GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000427 }
428
429 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000430 HInstruction* const instruction_;
431 const Location class_to_check_;
432 const Location object_class_;
433 uint32_t dex_pc_;
434
Alexandre Rames67555f72014-11-18 10:55:16 +0000435 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
436};
437
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700438class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
439 public:
440 explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
441 : instruction_(instruction) {}
442
443 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
444 __ Bind(GetEntryLabel());
445 SaveLiveRegisters(codegen, instruction_->GetLocations());
446 DCHECK(instruction_->IsDeoptimize());
447 HDeoptimize* deoptimize = instruction_->AsDeoptimize();
448 uint32_t dex_pc = deoptimize->GetDexPc();
449 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
450 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
451 }
452
453 private:
454 HInstruction* const instruction_;
455 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
456};
457
Alexandre Rames5319def2014-10-23 10:03:10 +0100458#undef __
459
460Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
461 Location next_location;
462 if (type == Primitive::kPrimVoid) {
463 LOG(FATAL) << "Unreachable type " << type;
464 }
465
Alexandre Rames542361f2015-01-29 16:57:31 +0000466 if (Primitive::IsFloatingPointType(type) &&
467 (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000468 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +0000469 } else if (!Primitive::IsFloatingPointType(type) &&
470 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000471 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
472 } else {
473 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +0000474 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
475 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100476 }
477
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000478 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +0000479 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100480 return next_location;
481}
482
Serban Constantinescu579885a2015-02-22 20:51:33 +0000483CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
484 const Arm64InstructionSetFeatures& isa_features,
485 const CompilerOptions& compiler_options)
Alexandre Rames5319def2014-10-23 10:03:10 +0100486 : CodeGenerator(graph,
487 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000488 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000489 kNumberOfAllocatableRegisterPairs,
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000490 callee_saved_core_registers.list(),
491 callee_saved_fp_registers.list(),
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000492 compiler_options),
Alexandre Rames5319def2014-10-23 10:03:10 +0100493 block_labels_(nullptr),
494 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000495 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +0000496 move_resolver_(graph->GetArena(), this),
497 isa_features_(isa_features) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000498 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000499 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000500}
Alexandre Rames5319def2014-10-23 10:03:10 +0100501
Alexandre Rames67555f72014-11-18 10:55:16 +0000502#undef __
503#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100504
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000505void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
506 // Ensure we emit the literal pool.
507 __ FinalizeCode();
508 CodeGenerator::Finalize(allocator);
509}
510
Zheng Xuad4450e2015-04-17 18:48:56 +0800511void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
512 // Note: There are 6 kinds of moves:
513 // 1. constant -> GPR/FPR (non-cycle)
514 // 2. constant -> stack (non-cycle)
515 // 3. GPR/FPR -> GPR/FPR
516 // 4. GPR/FPR -> stack
517 // 5. stack -> GPR/FPR
518 // 6. stack -> stack (non-cycle)
519 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
520 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
521 // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
522 // dependency.
523 vixl_temps_.Open(GetVIXLAssembler());
524}
525
526void ParallelMoveResolverARM64::FinishEmitNativeCode() {
527 vixl_temps_.Close();
528}
529
530Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
531 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
532 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
533 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
534 Location scratch = GetScratchLocation(kind);
535 if (!scratch.Equals(Location::NoLocation())) {
536 return scratch;
537 }
538 // Allocate from VIXL temp registers.
539 if (kind == Location::kRegister) {
540 scratch = LocationFrom(vixl_temps_.AcquireX());
541 } else {
542 DCHECK(kind == Location::kFpuRegister);
543 scratch = LocationFrom(vixl_temps_.AcquireD());
544 }
545 AddScratchLocation(scratch);
546 return scratch;
547}
548
549void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
550 if (loc.IsRegister()) {
551 vixl_temps_.Release(XRegisterFrom(loc));
552 } else {
553 DCHECK(loc.IsFpuRegister());
554 vixl_temps_.Release(DRegisterFrom(loc));
555 }
556 RemoveScratchLocation(loc);
557}
558
Alexandre Rames3e69f162014-12-10 10:36:50 +0000559void ParallelMoveResolverARM64::EmitMove(size_t index) {
560 MoveOperands* move = moves_.Get(index);
561 codegen_->MoveLocation(move->GetDestination(), move->GetSource());
562}
563
Alexandre Rames5319def2014-10-23 10:03:10 +0100564void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +0100565 MacroAssembler* masm = GetVIXLAssembler();
566 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +0000567 __ Bind(&frame_entry_label_);
568
Serban Constantinescu02164b32014-11-13 14:05:07 +0000569 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
570 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +0100571 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000572 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000573 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000574 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000575 __ Ldr(wzr, MemOperand(temp, 0));
576 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000577 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100578
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +0000579 if (!HasEmptyFrame()) {
580 int frame_size = GetFrameSize();
581 // Stack layout:
582 // sp[frame_size - 8] : lr.
583 // ... : other preserved core registers.
584 // ... : other preserved fp registers.
585 // ... : reserved frame space.
586 // sp[0] : current method.
587 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +0100588 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +0800589 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
590 frame_size - GetCoreSpillSize());
591 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
592 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +0000593 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100594}
595
596void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +0100597 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +0100598 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +0000599 if (!HasEmptyFrame()) {
600 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +0800601 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
602 frame_size - FrameEntrySpillSize());
603 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
604 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +0000605 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +0100606 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +0000607 }
David Srbeckyc34dc932015-04-12 09:27:43 +0100608 __ Ret();
609 GetAssembler()->cfi().RestoreState();
610 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +0100611}
612
Zheng Xuda403092015-04-24 17:35:39 +0800613vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
614 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
615 return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
616 core_spill_mask_);
617}
618
619vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
620 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
621 GetNumberOfFloatingPointRegisters()));
622 return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
623 fpu_spill_mask_);
624}
625
Alexandre Rames5319def2014-10-23 10:03:10 +0100626void CodeGeneratorARM64::Bind(HBasicBlock* block) {
627 __ Bind(GetLabelOf(block));
628}
629
Alexandre Rames5319def2014-10-23 10:03:10 +0100630void CodeGeneratorARM64::Move(HInstruction* instruction,
631 Location location,
632 HInstruction* move_for) {
633 LocationSummary* locations = instruction->GetLocations();
634 if (locations != nullptr && locations->Out().Equals(location)) {
635 return;
636 }
637
638 Primitive::Type type = instruction->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000639 DCHECK_NE(type, Primitive::kPrimVoid);
Alexandre Rames5319def2014-10-23 10:03:10 +0100640
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000641 if (instruction->IsIntConstant()
642 || instruction->IsLongConstant()
643 || instruction->IsNullConstant()) {
644 int64_t value = GetInt64ValueOf(instruction->AsConstant());
Alexandre Rames5319def2014-10-23 10:03:10 +0100645 if (location.IsRegister()) {
646 Register dst = RegisterFrom(location, type);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000647 DCHECK(((instruction->IsIntConstant() || instruction->IsNullConstant()) && dst.Is32Bits()) ||
Alexandre Rames5319def2014-10-23 10:03:10 +0100648 (instruction->IsLongConstant() && dst.Is64Bits()));
649 __ Mov(dst, value);
650 } else {
651 DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000652 UseScratchRegisterScope temps(GetVIXLAssembler());
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000653 Register temp = (instruction->IsIntConstant() || instruction->IsNullConstant())
654 ? temps.AcquireW()
655 : temps.AcquireX();
Alexandre Rames5319def2014-10-23 10:03:10 +0100656 __ Mov(temp, value);
657 __ Str(temp, StackOperandFrom(location));
658 }
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +0000659 } else if (instruction->IsTemporary()) {
660 Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000661 MoveLocation(location, temp_location, type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100662 } else if (instruction->IsLoadLocal()) {
663 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
Alexandre Rames542361f2015-01-29 16:57:31 +0000664 if (Primitive::Is64BitType(type)) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000665 MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000666 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000667 MoveLocation(location, Location::StackSlot(stack_slot), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100668 }
669
670 } else {
671 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000672 MoveLocation(location, locations->Out(), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100673 }
674}
675
Alexandre Rames5319def2014-10-23 10:03:10 +0100676Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
677 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000678
Alexandre Rames5319def2014-10-23 10:03:10 +0100679 switch (type) {
680 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000681 case Primitive::kPrimInt:
682 case Primitive::kPrimFloat:
683 return Location::StackSlot(GetStackSlot(load->GetLocal()));
684
685 case Primitive::kPrimLong:
686 case Primitive::kPrimDouble:
687 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
688
Alexandre Rames5319def2014-10-23 10:03:10 +0100689 case Primitive::kPrimBoolean:
690 case Primitive::kPrimByte:
691 case Primitive::kPrimChar:
692 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +0100693 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +0100694 LOG(FATAL) << "Unexpected type " << type;
695 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000696
Alexandre Rames5319def2014-10-23 10:03:10 +0100697 LOG(FATAL) << "Unreachable";
698 return Location::NoLocation();
699}
700
701void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
Alexandre Rames67555f72014-11-18 10:55:16 +0000702 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100703 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000704 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +0100705 vixl::Label done;
706 __ Cbz(value, &done);
707 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
708 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000709 __ Strb(card, MemOperand(card, temp.X()));
Alexandre Rames5319def2014-10-23 10:03:10 +0100710 __ Bind(&done);
711}
712
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000713void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline) const {
714 // Blocked core registers:
715 // lr : Runtime reserved.
716 // tr : Runtime reserved.
717 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
718 // ip1 : VIXL core temp.
719 // ip0 : VIXL core temp.
720 //
721 // Blocked fp registers:
722 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +0100723 CPURegList reserved_core_registers = vixl_reserved_core_registers;
724 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +0100725 while (!reserved_core_registers.IsEmpty()) {
726 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
727 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000728
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000729 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +0800730 while (!reserved_fp_registers.IsEmpty()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000731 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
732 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000733
734 if (is_baseline) {
735 CPURegList reserved_core_baseline_registers = callee_saved_core_registers;
736 while (!reserved_core_baseline_registers.IsEmpty()) {
737 blocked_core_registers_[reserved_core_baseline_registers.PopLowestIndex().code()] = true;
738 }
739
740 CPURegList reserved_fp_baseline_registers = callee_saved_fp_registers;
741 while (!reserved_fp_baseline_registers.IsEmpty()) {
742 blocked_fpu_registers_[reserved_fp_baseline_registers.PopLowestIndex().code()] = true;
743 }
744 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100745}
746
747Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
748 if (type == Primitive::kPrimVoid) {
749 LOG(FATAL) << "Unreachable type " << type;
750 }
751
Alexandre Rames542361f2015-01-29 16:57:31 +0000752 if (Primitive::IsFloatingPointType(type)) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000753 ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
754 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100755 return Location::FpuRegisterLocation(reg);
756 } else {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000757 ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
758 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100759 return Location::RegisterLocation(reg);
760 }
761}
762
Alexandre Rames3e69f162014-12-10 10:36:50 +0000763size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
764 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
765 __ Str(reg, MemOperand(sp, stack_index));
766 return kArm64WordSize;
767}
768
769size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
770 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
771 __ Ldr(reg, MemOperand(sp, stack_index));
772 return kArm64WordSize;
773}
774
775size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
776 FPRegister reg = FPRegister(reg_id, kDRegSize);
777 __ Str(reg, MemOperand(sp, stack_index));
778 return kArm64WordSize;
779}
780
781size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
782 FPRegister reg = FPRegister(reg_id, kDRegSize);
783 __ Ldr(reg, MemOperand(sp, stack_index));
784 return kArm64WordSize;
785}
786
Alexandre Rames5319def2014-10-23 10:03:10 +0100787void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
788 stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
789}
790
791void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
792 stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
793}
794
Alexandre Rames67555f72014-11-18 10:55:16 +0000795void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000796 if (constant->IsIntConstant()) {
797 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
798 } else if (constant->IsLongConstant()) {
799 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
800 } else if (constant->IsNullConstant()) {
801 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +0000802 } else if (constant->IsFloatConstant()) {
803 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
804 } else {
805 DCHECK(constant->IsDoubleConstant());
806 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
807 }
808}
809
Alexandre Rames3e69f162014-12-10 10:36:50 +0000810
811static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
812 DCHECK(constant.IsConstant());
813 HConstant* cst = constant.GetConstant();
814 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000815 // Null is mapped to a core W register, which we associate with kPrimInt.
816 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +0000817 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
818 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
819 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
820}
821
822void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000823 if (source.Equals(destination)) {
824 return;
825 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000826
827 // A valid move can always be inferred from the destination and source
828 // locations. When moving from and to a register, the argument type can be
829 // used to generate 32bit instead of 64bit moves. In debug mode we also
830 // checks the coherency of the locations and the type.
831 bool unspecified_type = (type == Primitive::kPrimVoid);
832
833 if (destination.IsRegister() || destination.IsFpuRegister()) {
834 if (unspecified_type) {
835 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
836 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000837 (src_cst != nullptr && (src_cst->IsIntConstant()
838 || src_cst->IsFloatConstant()
839 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000840 // For stack slots and 32bit constants, a 64bit type is appropriate.
841 type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +0000842 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000843 // If the source is a double stack slot or a 64bit constant, a 64bit
844 // type is appropriate. Else the source is a register, and since the
845 // type has not been specified, we chose a 64bit type to force a 64bit
846 // move.
847 type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +0000848 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000849 }
Alexandre Rames542361f2015-01-29 16:57:31 +0000850 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(type)) ||
851 (destination.IsRegister() && !Primitive::IsFloatingPointType(type)));
Alexandre Rames3e69f162014-12-10 10:36:50 +0000852 CPURegister dst = CPURegisterFrom(destination, type);
853 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
854 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
855 __ Ldr(dst, StackOperandFrom(source));
856 } else if (source.IsConstant()) {
857 DCHECK(CoherentConstantAndType(source, type));
858 MoveConstant(dst, source.GetConstant());
859 } else {
860 if (destination.IsRegister()) {
861 __ Mov(Register(dst), RegisterFrom(source, type));
862 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +0800863 DCHECK(destination.IsFpuRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000864 __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
865 }
866 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000867 } else { // The destination is not a register. It must be a stack slot.
868 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
869 if (source.IsRegister() || source.IsFpuRegister()) {
870 if (unspecified_type) {
871 if (source.IsRegister()) {
872 type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
873 } else {
874 type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
875 }
876 }
Alexandre Rames542361f2015-01-29 16:57:31 +0000877 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(type)) &&
878 (source.IsFpuRegister() == Primitive::IsFloatingPointType(type)));
Alexandre Rames3e69f162014-12-10 10:36:50 +0000879 __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
880 } else if (source.IsConstant()) {
881 DCHECK(unspecified_type || CoherentConstantAndType(source, type));
882 UseScratchRegisterScope temps(GetVIXLAssembler());
883 HConstant* src_cst = source.GetConstant();
884 CPURegister temp;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000885 if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000886 temp = temps.AcquireW();
887 } else if (src_cst->IsLongConstant()) {
888 temp = temps.AcquireX();
889 } else if (src_cst->IsFloatConstant()) {
890 temp = temps.AcquireS();
891 } else {
892 DCHECK(src_cst->IsDoubleConstant());
893 temp = temps.AcquireD();
894 }
895 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +0000896 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000897 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +0000898 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000899 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000900 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000901 // There is generally less pressure on FP registers.
902 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000903 __ Ldr(temp, StackOperandFrom(source));
904 __ Str(temp, StackOperandFrom(destination));
905 }
906 }
907}
908
909void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000910 CPURegister dst,
911 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000912 switch (type) {
913 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +0000914 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000915 break;
916 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +0000917 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000918 break;
919 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +0000920 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000921 break;
922 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +0000923 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000924 break;
925 case Primitive::kPrimInt:
926 case Primitive::kPrimNot:
927 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000928 case Primitive::kPrimFloat:
929 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +0000930 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +0000931 __ Ldr(dst, src);
932 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000933 case Primitive::kPrimVoid:
934 LOG(FATAL) << "Unreachable type " << type;
935 }
936}
937
Calin Juravle77520bc2015-01-12 18:45:46 +0000938void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000939 CPURegister dst,
940 const MemOperand& src) {
Alexandre Ramesd921d642015-04-16 15:07:16 +0100941 MacroAssembler* masm = GetVIXLAssembler();
942 BlockPoolsScope block_pools(masm);
943 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000944 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +0000945 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000946
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000947 DCHECK(!src.IsPreIndex());
948 DCHECK(!src.IsPostIndex());
949
950 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Andreas Gampe878d58c2015-01-15 23:24:00 -0800951 __ Add(temp_base, src.base(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000952 MemOperand base = MemOperand(temp_base);
953 switch (type) {
954 case Primitive::kPrimBoolean:
955 __ Ldarb(Register(dst), base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000956 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000957 break;
958 case Primitive::kPrimByte:
959 __ Ldarb(Register(dst), base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000960 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000961 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
962 break;
963 case Primitive::kPrimChar:
964 __ Ldarh(Register(dst), base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000965 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000966 break;
967 case Primitive::kPrimShort:
968 __ Ldarh(Register(dst), base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000969 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000970 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
971 break;
972 case Primitive::kPrimInt:
973 case Primitive::kPrimNot:
974 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +0000975 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000976 __ Ldar(Register(dst), base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000977 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000978 break;
979 case Primitive::kPrimFloat:
980 case Primitive::kPrimDouble: {
981 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +0000982 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000983
984 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
985 __ Ldar(temp, base);
Calin Juravle77520bc2015-01-12 18:45:46 +0000986 MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000987 __ Fmov(FPRegister(dst), temp);
988 break;
989 }
990 case Primitive::kPrimVoid:
991 LOG(FATAL) << "Unreachable type " << type;
992 }
993}
994
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000995void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000996 CPURegister src,
997 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000998 switch (type) {
999 case Primitive::kPrimBoolean:
1000 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001001 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001002 break;
1003 case Primitive::kPrimChar:
1004 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001005 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001006 break;
1007 case Primitive::kPrimInt:
1008 case Primitive::kPrimNot:
1009 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001010 case Primitive::kPrimFloat:
1011 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001012 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001013 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001014 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001015 case Primitive::kPrimVoid:
1016 LOG(FATAL) << "Unreachable type " << type;
1017 }
1018}
1019
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001020void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1021 CPURegister src,
1022 const MemOperand& dst) {
1023 UseScratchRegisterScope temps(GetVIXLAssembler());
1024 Register temp_base = temps.AcquireX();
1025
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001026 DCHECK(!dst.IsPreIndex());
1027 DCHECK(!dst.IsPostIndex());
1028
1029 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001030 Operand op = OperandFromMemOperand(dst);
1031 __ Add(temp_base, dst.base(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001032 MemOperand base = MemOperand(temp_base);
1033 switch (type) {
1034 case Primitive::kPrimBoolean:
1035 case Primitive::kPrimByte:
1036 __ Stlrb(Register(src), base);
1037 break;
1038 case Primitive::kPrimChar:
1039 case Primitive::kPrimShort:
1040 __ Stlrh(Register(src), base);
1041 break;
1042 case Primitive::kPrimInt:
1043 case Primitive::kPrimNot:
1044 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001045 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001046 __ Stlr(Register(src), base);
1047 break;
1048 case Primitive::kPrimFloat:
1049 case Primitive::kPrimDouble: {
1050 DCHECK(src.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001051 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001052
1053 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1054 __ Fmov(temp, FPRegister(src));
1055 __ Stlr(temp, base);
1056 break;
1057 }
1058 case Primitive::kPrimVoid:
1059 LOG(FATAL) << "Unreachable type " << type;
1060 }
1061}
1062
Alexandre Rames67555f72014-11-18 10:55:16 +00001063void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001064 DCHECK(RequiresCurrentMethod());
Alexandre Rames67555f72014-11-18 10:55:16 +00001065 DCHECK(current_method.IsW());
1066 __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
1067}
1068
1069void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1070 HInstruction* instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001071 uint32_t dex_pc,
1072 SlowPathCode* slow_path) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001073 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames67555f72014-11-18 10:55:16 +00001074 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1075 __ Blr(lr);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001076 if (instruction != nullptr) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001077 RecordPcInfo(instruction, dex_pc, slow_path);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001078 DCHECK(instruction->IsSuspendCheck()
1079 || instruction->IsBoundsCheck()
1080 || instruction->IsNullCheck()
1081 || instruction->IsDivZeroCheck()
1082 || !IsLeafMethod());
1083 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001084}
1085
1086void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1087 vixl::Register class_reg) {
1088 UseScratchRegisterScope temps(GetVIXLAssembler());
1089 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001090 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
Serban Constantinescu579885a2015-02-22 20:51:33 +00001091 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001092
Serban Constantinescu02164b32014-11-13 14:05:07 +00001093 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu579885a2015-02-22 20:51:33 +00001094 if (use_acquire_release) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001095 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1096 __ Add(temp, class_reg, status_offset);
1097 __ Ldar(temp, HeapOperand(temp));
1098 __ Cmp(temp, mirror::Class::kStatusInitialized);
1099 __ B(lt, slow_path->GetEntryLabel());
1100 } else {
1101 __ Ldr(temp, HeapOperand(class_reg, status_offset));
1102 __ Cmp(temp, mirror::Class::kStatusInitialized);
1103 __ B(lt, slow_path->GetEntryLabel());
1104 __ Dmb(InnerShareable, BarrierReads);
1105 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001106 __ Bind(slow_path->GetExitLabel());
1107}
Alexandre Rames5319def2014-10-23 10:03:10 +01001108
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001109void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
1110 BarrierType type = BarrierAll;
1111
1112 switch (kind) {
1113 case MemBarrierKind::kAnyAny:
1114 case MemBarrierKind::kAnyStore: {
1115 type = BarrierAll;
1116 break;
1117 }
1118 case MemBarrierKind::kLoadAny: {
1119 type = BarrierReads;
1120 break;
1121 }
1122 case MemBarrierKind::kStoreStore: {
1123 type = BarrierWrites;
1124 break;
1125 }
1126 default:
1127 LOG(FATAL) << "Unexpected memory barrier " << kind;
1128 }
1129 __ Dmb(InnerShareable, type);
1130}
1131
Serban Constantinescu02164b32014-11-13 14:05:07 +00001132void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1133 HBasicBlock* successor) {
1134 SuspendCheckSlowPathARM64* slow_path =
1135 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1136 codegen_->AddSlowPath(slow_path);
1137 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1138 Register temp = temps.AcquireW();
1139
1140 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
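  // Any non-zero thread flag (e.g. a pending suspend request) routes execution to
  // the slow path.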
1141 if (successor == nullptr) {
1142 __ Cbnz(temp, slow_path->GetEntryLabel());
1143 __ Bind(slow_path->GetReturnLabel());
1144 } else {
1145 __ Cbz(temp, codegen_->GetLabelOf(successor));
1146 __ B(slow_path->GetEntryLabel());
1147 // slow_path will return to GetLabelOf(successor).
1148 }
1149}
1150
Alexandre Rames5319def2014-10-23 10:03:10 +01001151InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1152 CodeGeneratorARM64* codegen)
1153 : HGraphVisitor(graph),
1154 assembler_(codegen->GetAssembler()),
1155 codegen_(codegen) {}
1156
1157#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001158 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001159
1160#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1161
1162enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001163 // Using a base helps identify when we hit such breakpoints.
1164 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001165#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1166 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1167#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1168};
1169
1170#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
1171 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001172 UNUSED(instr); \
Alexandre Rames5319def2014-10-23 10:03:10 +01001173 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1174 } \
1175 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1176 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1177 locations->SetOut(Location::Any()); \
1178 }
1179 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1180#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1181
1182#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001183#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001184
Alexandre Rames67555f72014-11-18 10:55:16 +00001185void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001186 DCHECK_EQ(instr->InputCount(), 2U);
1187 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1188 Primitive::Type type = instr->GetResultType();
1189 switch (type) {
1190 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001191 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001192 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001193 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001194 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001195 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001196
1197 case Primitive::kPrimFloat:
1198 case Primitive::kPrimDouble:
1199 locations->SetInAt(0, Location::RequiresFpuRegister());
1200 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001201 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001202 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001203
Alexandre Rames5319def2014-10-23 10:03:10 +01001204 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001205 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001206 }
1207}
1208
Alexandre Rames09a99962015-04-15 11:47:56 +01001209void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
1210 LocationSummary* locations =
1211 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1212 locations->SetInAt(0, Location::RequiresRegister());
1213 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1214 locations->SetOut(Location::RequiresFpuRegister());
1215 } else {
1216 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1217 }
1218}
1219
1220void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1221 const FieldInfo& field_info) {
1222 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001223 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001224
1225 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
1226 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1227
1228 if (field_info.IsVolatile()) {
1229 if (use_acquire_release) {
1230 // NB: LoadAcquire will record the pc info if needed.
1231 codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
1232 } else {
1233 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1234 codegen_->MaybeRecordImplicitNullCheck(instruction);
1235 // For IRIW sequential consistency kLoadAny is not sufficient.
1236 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1237 }
1238 } else {
1239 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1240 codegen_->MaybeRecordImplicitNullCheck(instruction);
1241 }
1242}
1243
1244void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1245 LocationSummary* locations =
1246 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1247 locations->SetInAt(0, Location::RequiresRegister());
1248 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1249 locations->SetInAt(1, Location::RequiresFpuRegister());
1250 } else {
1251 locations->SetInAt(1, Location::RequiresRegister());
1252 }
1253}
1254
1255void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
1256 const FieldInfo& field_info) {
1257 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001258 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001259
1260 Register obj = InputRegisterAt(instruction, 0);
1261 CPURegister value = InputCPURegisterAt(instruction, 1);
1262 Offset offset = field_info.GetFieldOffset();
1263 Primitive::Type field_type = field_info.GetFieldType();
1264 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1265
1266 if (field_info.IsVolatile()) {
1267 if (use_acquire_release) {
1268 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1269 codegen_->MaybeRecordImplicitNullCheck(instruction);
1270 } else {
1271 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1272 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1273 codegen_->MaybeRecordImplicitNullCheck(instruction);
1274 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1275 }
1276 } else {
1277 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1278 codegen_->MaybeRecordImplicitNullCheck(instruction);
1279 }
1280
1281 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
1282 codegen_->MarkGCCard(obj, Register(value));
1283 }
1284}
1285
Alexandre Rames67555f72014-11-18 10:55:16 +00001286void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001287 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001288
1289 switch (type) {
1290 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001291 case Primitive::kPrimLong: {
1292 Register dst = OutputRegister(instr);
1293 Register lhs = InputRegisterAt(instr, 0);
1294 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001295 if (instr->IsAdd()) {
1296 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001297 } else if (instr->IsAnd()) {
1298 __ And(dst, lhs, rhs);
1299 } else if (instr->IsOr()) {
1300 __ Orr(dst, lhs, rhs);
1301 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001302 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001303 } else {
1304 DCHECK(instr->IsXor());
1305 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001306 }
1307 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001308 }
1309 case Primitive::kPrimFloat:
1310 case Primitive::kPrimDouble: {
1311 FPRegister dst = OutputFPRegister(instr);
1312 FPRegister lhs = InputFPRegisterAt(instr, 0);
1313 FPRegister rhs = InputFPRegisterAt(instr, 1);
1314 if (instr->IsAdd()) {
1315 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001316 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001317 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001318 } else {
1319 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001320 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001321 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001322 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001323 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001324 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001325 }
1326}
1327
Serban Constantinescu02164b32014-11-13 14:05:07 +00001328void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1329 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1330
1331 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1332 Primitive::Type type = instr->GetResultType();
1333 switch (type) {
1334 case Primitive::kPrimInt:
1335 case Primitive::kPrimLong: {
1336 locations->SetInAt(0, Location::RequiresRegister());
1337 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1338 locations->SetOut(Location::RequiresRegister());
1339 break;
1340 }
1341 default:
1342 LOG(FATAL) << "Unexpected shift type " << type;
1343 }
1344}
1345
1346void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1347 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1348
1349 Primitive::Type type = instr->GetType();
1350 switch (type) {
1351 case Primitive::kPrimInt:
1352 case Primitive::kPrimLong: {
1353 Register dst = OutputRegister(instr);
1354 Register lhs = InputRegisterAt(instr, 0);
1355 Operand rhs = InputOperandAt(instr, 1);
1356 if (rhs.IsImmediate()) {
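        // Constant shift amounts are masked to the operand width (five bits for int,
        // six for long), as required by Java shift semantics.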
1357 uint32_t shift_value = (type == Primitive::kPrimInt)
1358 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1359 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1360 if (instr->IsShl()) {
1361 __ Lsl(dst, lhs, shift_value);
1362 } else if (instr->IsShr()) {
1363 __ Asr(dst, lhs, shift_value);
1364 } else {
1365 __ Lsr(dst, lhs, shift_value);
1366 }
1367 } else {
1368 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1369
1370 if (instr->IsShl()) {
1371 __ Lsl(dst, lhs, rhs_reg);
1372 } else if (instr->IsShr()) {
1373 __ Asr(dst, lhs, rhs_reg);
1374 } else {
1375 __ Lsr(dst, lhs, rhs_reg);
1376 }
1377 }
1378 break;
1379 }
1380 default:
1381 LOG(FATAL) << "Unexpected shift operation type " << type;
1382 }
1383}
1384
Alexandre Rames5319def2014-10-23 10:03:10 +01001385void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001386 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001387}
1388
1389void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001390 HandleBinaryOp(instruction);
1391}
1392
1393void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1394 HandleBinaryOp(instruction);
1395}
1396
1397void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1398 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001399}
1400
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001401void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1402 LocationSummary* locations =
1403 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1404 locations->SetInAt(0, Location::RequiresRegister());
1405 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001406 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1407 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1408 } else {
1409 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1410 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001411}
1412
1413void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1414 LocationSummary* locations = instruction->GetLocations();
1415 Primitive::Type type = instruction->GetType();
1416 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001417 Location index = locations->InAt(1);
1418 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001419 MemOperand source = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001420 MacroAssembler* masm = GetVIXLAssembler();
1421 UseScratchRegisterScope temps(masm);
1422 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001423
1424 if (index.IsConstant()) {
1425 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001426 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001427 } else {
1428 Register temp = temps.AcquireSameSizeAs(obj);
1429 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1430 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001431 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001432 }
1433
Alexandre Rames67555f72014-11-18 10:55:16 +00001434 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001435 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001436}
1437
Alexandre Rames5319def2014-10-23 10:03:10 +01001438void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1439 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1440 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001441 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001442}
1443
1444void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001445 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001446 __ Ldr(OutputRegister(instruction),
1447 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001448 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001449}
1450
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001451void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Alexandre Rames97833a02015-04-16 15:07:12 +01001452 if (instruction->NeedsTypeCheck()) {
1453 LocationSummary* locations =
1454 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001455 InvokeRuntimeCallingConvention calling_convention;
1456 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1457 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1458 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1459 } else {
Alexandre Rames97833a02015-04-16 15:07:12 +01001460 LocationSummary* locations =
1461 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001462 locations->SetInAt(0, Location::RequiresRegister());
1463 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001464 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1465 locations->SetInAt(2, Location::RequiresFpuRegister());
1466 } else {
1467 locations->SetInAt(2, Location::RequiresRegister());
1468 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001469 }
1470}
1471
1472void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1473 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01001474 LocationSummary* locations = instruction->GetLocations();
1475 bool needs_runtime_call = locations->WillCall();
1476
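  // When the component type requires a type check, the store is delegated to the
  // pAputObject runtime entry point, which performs the check.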
1477 if (needs_runtime_call) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001478 codegen_->InvokeRuntime(
1479 QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001480 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001481 } else {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001482 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001483 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001484 Location index = locations->InAt(1);
1485 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001486 MemOperand destination = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001487 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001488 BlockPoolsScope block_pools(masm);
Alexandre Rames97833a02015-04-16 15:07:12 +01001489 {
1490 // We use a block to end the scratch scope before the write barrier, thus
1491 // freeing the temporary registers so they can be used in `MarkGCCard`.
1492 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001493
Alexandre Rames97833a02015-04-16 15:07:12 +01001494 if (index.IsConstant()) {
1495 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
1496 destination = HeapOperand(obj, offset);
1497 } else {
1498 Register temp = temps.AcquireSameSizeAs(obj);
1499 Register index_reg = InputRegisterAt(instruction, 1);
1500 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
1501 destination = HeapOperand(temp, offset);
1502 }
1503
1504 codegen_->Store(value_type, value, destination);
1505 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001506 }
Alexandre Rames97833a02015-04-16 15:07:12 +01001507 if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
1508 codegen_->MarkGCCard(obj, value.W());
1509 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001510 }
1511}
1512
Alexandre Rames67555f72014-11-18 10:55:16 +00001513void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1514 LocationSummary* locations =
1515 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1516 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00001517 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00001518 if (instruction->HasUses()) {
1519 locations->SetOut(Location::SameAsFirstInput());
1520 }
1521}
1522
1523void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001524 LocationSummary* locations = instruction->GetLocations();
1525 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1526 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001527 codegen_->AddSlowPath(slow_path);
1528
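  // A single unsigned comparison covers both failure cases: a negative index wraps
  // to a large unsigned value, so 'hs' also triggers for index < 0.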
1529 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1530 __ B(slow_path->GetEntryLabel(), hs);
1531}
1532
1533void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1534 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1535 instruction, LocationSummary::kCallOnSlowPath);
1536 locations->SetInAt(0, Location::RequiresRegister());
1537 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001538 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001539}
1540
1541void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001542 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001543 Register obj = InputRegisterAt(instruction, 0);
 1544 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001545 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001546
Alexandre Rames3e69f162014-12-10 10:36:50 +00001547 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1548 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001549 codegen_->AddSlowPath(slow_path);
1550
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01001551 // Avoid null check if we know obj is not null.
1552 if (instruction->MustDoNullCheck()) {
1553 __ Cbz(obj, slow_path->GetExitLabel());
1554 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001555 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001556 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1557 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001558 __ B(ne, slow_path->GetEntryLabel());
1559 __ Bind(slow_path->GetExitLabel());
1560}
1561
1562void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1563 LocationSummary* locations =
1564 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1565 locations->SetInAt(0, Location::RequiresRegister());
1566 if (check->HasUses()) {
1567 locations->SetOut(Location::SameAsFirstInput());
1568 }
1569}
1570
1571void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1572 // We assume the class is not null.
1573 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1574 check->GetLoadClass(), check, check->GetDexPc(), true);
1575 codegen_->AddSlowPath(slow_path);
1576 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1577}
1578
Serban Constantinescu02164b32014-11-13 14:05:07 +00001579void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001580 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001581 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1582 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001583 switch (in_type) {
1584 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001585 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001586 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001587 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1588 break;
1589 }
1590 case Primitive::kPrimFloat:
1591 case Primitive::kPrimDouble: {
1592 locations->SetInAt(0, Location::RequiresFpuRegister());
Alexandre Rames93415462015-02-17 15:08:20 +00001593 HInstruction* right = compare->InputAt(1);
1594 if ((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1595 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0))) {
1596 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1597 } else {
1598 locations->SetInAt(1, Location::RequiresFpuRegister());
1599 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001600 locations->SetOut(Location::RequiresRegister());
1601 break;
1602 }
1603 default:
1604 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1605 }
1606}
1607
1608void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1609 Primitive::Type in_type = compare->InputAt(0)->GetType();
1610
1611 // 0 if: left == right
1612 // 1 if: left > right
1613 // -1 if: left < right
1614 switch (in_type) {
1615 case Primitive::kPrimLong: {
1616 Register result = OutputRegister(compare);
1617 Register left = InputRegisterAt(compare, 0);
1618 Operand right = InputOperandAt(compare, 1);
1619
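      // Set the result to 1 when left != right, then negate it when left < right,
      // yielding the -1 / 0 / 1 convention described above.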
1620 __ Cmp(left, right);
1621 __ Cset(result, ne);
1622 __ Cneg(result, result, lt);
1623 break;
1624 }
1625 case Primitive::kPrimFloat:
1626 case Primitive::kPrimDouble: {
1627 Register result = OutputRegister(compare);
1628 FPRegister left = InputFPRegisterAt(compare, 0);
Alexandre Rames93415462015-02-17 15:08:20 +00001629 if (compare->GetLocations()->InAt(1).IsConstant()) {
1630 if (kIsDebugBuild) {
1631 HInstruction* right = compare->GetLocations()->InAt(1).GetConstant();
1632 DCHECK((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1633 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0)));
1634 }
1635 // 0.0 is the only immediate that can be encoded directly in a FCMP instruction.
1636 __ Fcmp(left, 0.0);
1637 } else {
1638 __ Fcmp(left, InputFPRegisterAt(compare, 1));
1639 }
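      // With a gt bias an unordered (NaN) comparison must produce 1, with an lt bias
      // it must produce -1; the Cset/Csetm and mi/gt negation conditions encode this.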
Serban Constantinescu02164b32014-11-13 14:05:07 +00001640 if (compare->IsGtBias()) {
1641 __ Cset(result, ne);
1642 } else {
1643 __ Csetm(result, ne);
1644 }
1645 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001646 break;
1647 }
1648 default:
1649 LOG(FATAL) << "Unimplemented compare type " << in_type;
1650 }
1651}
1652
1653void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1654 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1655 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001656 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames5319def2014-10-23 10:03:10 +01001657 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001658 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001659 }
1660}
1661
1662void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1663 if (!instruction->NeedsMaterialization()) {
1664 return;
1665 }
1666
1667 LocationSummary* locations = instruction->GetLocations();
1668 Register lhs = InputRegisterAt(instruction, 0);
1669 Operand rhs = InputOperandAt(instruction, 1);
1670 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1671 Condition cond = ARM64Condition(instruction->GetCondition());
1672
1673 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001674 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001675}
1676
1677#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1678 M(Equal) \
1679 M(NotEqual) \
1680 M(LessThan) \
1681 M(LessThanOrEqual) \
1682 M(GreaterThan) \
1683 M(GreaterThanOrEqual)
1684#define DEFINE_CONDITION_VISITORS(Name) \
1685void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1686void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1687FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001688#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001689#undef FOR_EACH_CONDITION_INSTRUCTION
1690
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001691void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1692 LocationSummary* locations =
1693 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1694 switch (div->GetResultType()) {
1695 case Primitive::kPrimInt:
1696 case Primitive::kPrimLong:
1697 locations->SetInAt(0, Location::RequiresRegister());
1698 locations->SetInAt(1, Location::RequiresRegister());
1699 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1700 break;
1701
1702 case Primitive::kPrimFloat:
1703 case Primitive::kPrimDouble:
1704 locations->SetInAt(0, Location::RequiresFpuRegister());
1705 locations->SetInAt(1, Location::RequiresFpuRegister());
1706 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1707 break;
1708
1709 default:
1710 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1711 }
1712}
1713
1714void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1715 Primitive::Type type = div->GetResultType();
1716 switch (type) {
1717 case Primitive::kPrimInt:
1718 case Primitive::kPrimLong:
1719 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1720 break;
1721
1722 case Primitive::kPrimFloat:
1723 case Primitive::kPrimDouble:
1724 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1725 break;
1726
1727 default:
1728 LOG(FATAL) << "Unexpected div type " << type;
1729 }
1730}
1731
Alexandre Rames67555f72014-11-18 10:55:16 +00001732void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1733 LocationSummary* locations =
1734 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1735 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1736 if (instruction->HasUses()) {
1737 locations->SetOut(Location::SameAsFirstInput());
1738 }
1739}
1740
1741void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1742 SlowPathCodeARM64* slow_path =
1743 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1744 codegen_->AddSlowPath(slow_path);
1745 Location value = instruction->GetLocations()->InAt(0);
1746
Alexandre Rames3e69f162014-12-10 10:36:50 +00001747 Primitive::Type type = instruction->GetType();
1748
1749 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
 1750 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1751 return;
1752 }
1753
Alexandre Rames67555f72014-11-18 10:55:16 +00001754 if (value.IsConstant()) {
1755 int64_t divisor = Int64ConstantFrom(value);
1756 if (divisor == 0) {
1757 __ B(slow_path->GetEntryLabel());
1758 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001759 // A division by a non-zero constant is valid. We don't need to perform
 1760 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001761 }
1762 } else {
1763 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1764 }
1765}
1766
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001767void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1768 LocationSummary* locations =
1769 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1770 locations->SetOut(Location::ConstantLocation(constant));
1771}
1772
1773void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1774 UNUSED(constant);
1775 // Will be generated at use site.
1776}
1777
Alexandre Rames5319def2014-10-23 10:03:10 +01001778void LocationsBuilderARM64::VisitExit(HExit* exit) {
1779 exit->SetLocations(nullptr);
1780}
1781
1782void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001783 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001784}
1785
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001786void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1787 LocationSummary* locations =
1788 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1789 locations->SetOut(Location::ConstantLocation(constant));
1790}
1791
1792void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1793 UNUSED(constant);
1794 // Will be generated at use site.
1795}
1796
Alexandre Rames5319def2014-10-23 10:03:10 +01001797void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1798 got->SetLocations(nullptr);
1799}
1800
1801void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1802 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001803 DCHECK(!successor->IsExitBlock());
1804 HBasicBlock* block = got->GetBlock();
1805 HInstruction* previous = got->GetPrevious();
1806 HLoopInformation* info = block->GetLoopInformation();
1807
David Brazdil46e2a392015-03-16 17:31:52 +00001808 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001809 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1810 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1811 return;
1812 }
1813 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1814 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1815 }
1816 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001817 __ B(codegen_->GetLabelOf(successor));
1818 }
1819}
1820
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001821void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
1822 vixl::Label* true_target,
1823 vixl::Label* false_target,
1824 vixl::Label* always_true_target) {
1825 HInstruction* cond = instruction->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001826 HCondition* condition = cond->AsCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01001827
Serban Constantinescu02164b32014-11-13 14:05:07 +00001828 if (cond->IsIntConstant()) {
1829 int32_t cond_value = cond->AsIntConstant()->GetValue();
1830 if (cond_value == 1) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001831 if (always_true_target != nullptr) {
1832 __ B(always_true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001833 }
1834 return;
1835 } else {
1836 DCHECK_EQ(cond_value, 0);
1837 }
1838 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001839 // The condition instruction has been materialized, compare the output to 0.
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001840 Location cond_val = instruction->GetLocations()->InAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001841 DCHECK(cond_val.IsRegister());
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001842 __ Cbnz(InputRegisterAt(instruction, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001843 } else {
1844 // The condition instruction has not been materialized, use its inputs as
1845 // the comparison and its condition as the branch condition.
1846 Register lhs = InputRegisterAt(condition, 0);
1847 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001848 Condition arm64_cond = ARM64Condition(condition->GetCondition());
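    // A comparison against the constant 0 can be lowered to a single cbz/cbnz or a
    // sign-bit test; gt and le are excluded because they depend on both the zero and
    // the sign conditions.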
Alexandre Rames4388dcc2015-02-03 10:28:33 +00001849 if ((arm64_cond != gt && arm64_cond != le) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1850 switch (arm64_cond) {
1851 case eq:
1852 __ Cbz(lhs, true_target);
1853 break;
1854 case ne:
1855 __ Cbnz(lhs, true_target);
1856 break;
1857 case lt:
1858 // Test the sign bit and branch accordingly.
1859 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1860 break;
1861 case ge:
1862 // Test the sign bit and branch accordingly.
1863 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
1864 break;
1865 default:
1866 // Without the `static_cast` the compiler throws an error for
1867 // `-Werror=sign-promo`.
1868 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001869 }
1870 } else {
1871 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001872 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001873 }
1874 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001875 if (false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001876 __ B(false_target);
1877 }
1878}
1879
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001880void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1881 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1882 HInstruction* cond = if_instr->InputAt(0);
1883 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
1884 locations->SetInAt(0, Location::RequiresRegister());
1885 }
1886}
1887
1888void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1889 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1890 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1891 vixl::Label* always_true_target = true_target;
1892 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1893 if_instr->IfTrueSuccessor())) {
1894 always_true_target = nullptr;
1895 }
1896 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1897 if_instr->IfFalseSuccessor())) {
1898 false_target = nullptr;
1899 }
1900 GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1901}
1902
1903void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1904 LocationSummary* locations = new (GetGraph()->GetArena())
1905 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
1906 HInstruction* cond = deoptimize->InputAt(0);
1907 DCHECK(cond->IsCondition());
1908 if (cond->AsCondition()->NeedsMaterialization()) {
1909 locations->SetInAt(0, Location::RequiresRegister());
1910 }
1911}
1912
1913void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
1914 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
1915 DeoptimizationSlowPathARM64(deoptimize);
1916 codegen_->AddSlowPath(slow_path);
1917 vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
1918 GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1919}
1920
Alexandre Rames5319def2014-10-23 10:03:10 +01001921void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001922 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001923}
1924
1925void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001926 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001927}
1928
1929void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001930 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001931}
1932
1933void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001934 HandleFieldSet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01001935}
1936
Alexandre Rames67555f72014-11-18 10:55:16 +00001937void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1938 LocationSummary::CallKind call_kind =
1939 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1940 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1941 locations->SetInAt(0, Location::RequiresRegister());
1942 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001943 // The output does overlap the inputs.
1944 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00001945}
1946
1947void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1948 LocationSummary* locations = instruction->GetLocations();
 1949 Register obj = InputRegisterAt(instruction, 0);
 1950 Register cls = InputRegisterAt(instruction, 1);
1951 Register out = OutputRegister(instruction);
1952
1953 vixl::Label done;
1954
1955 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01001956 // Avoid null check if we know `obj` is not null.
1957 if (instruction->MustDoNullCheck()) {
1958 __ Mov(out, 0);
1959 __ Cbz(obj, &done);
1960 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001961
1962 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001963 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001964 __ Cmp(out, cls);
1965 if (instruction->IsClassFinal()) {
1966 // Classes must be equal for the instanceof to succeed.
1967 __ Cset(out, eq);
1968 } else {
1969 // If the classes are not equal, we go into a slow path.
1970 DCHECK(locations->OnlyCallsOnSlowPath());
1971 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001972 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1973 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001974 codegen_->AddSlowPath(slow_path);
1975 __ B(ne, slow_path->GetEntryLabel());
1976 __ Mov(out, 1);
1977 __ Bind(slow_path->GetExitLabel());
1978 }
1979
1980 __ Bind(&done);
1981}
1982
Alexandre Rames5319def2014-10-23 10:03:10 +01001983void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1984 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1985 locations->SetOut(Location::ConstantLocation(constant));
1986}
1987
1988void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1989 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001990 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001991}
1992
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001993void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
1994 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1995 locations->SetOut(Location::ConstantLocation(constant));
1996}
1997
1998void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
1999 // Will be generated at use site.
2000 UNUSED(constant);
2001}
2002
Alexandre Rames5319def2014-10-23 10:03:10 +01002003void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
2004 LocationSummary* locations =
2005 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
2006 locations->AddTemp(LocationFrom(x0));
2007
2008 InvokeDexCallingConventionVisitor calling_convention_visitor;
2009 for (size_t i = 0; i < invoke->InputCount(); i++) {
2010 HInstruction* input = invoke->InputAt(i);
2011 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
2012 }
2013
2014 Primitive::Type return_type = invoke->GetType();
2015 if (return_type != Primitive::kPrimVoid) {
2016 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
2017 }
2018}
2019
Alexandre Rames67555f72014-11-18 10:55:16 +00002020void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
2021 HandleInvoke(invoke);
2022}
2023
2024void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
2025 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
2026 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2027 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
2028 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
2029 Location receiver = invoke->GetLocations()->InAt(0);
2030 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002031 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00002032
2033 // The register ip1 is required to be used for the hidden argument in
2034 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002035 MacroAssembler* masm = GetVIXLAssembler();
2036 UseScratchRegisterScope scratch_scope(masm);
2037 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00002038 scratch_scope.Exclude(ip1);
2039 __ Mov(ip1, invoke->GetDexMethodIndex());
2040
2041 // temp = object->GetClass();
2042 if (receiver.IsStackSlot()) {
2043 __ Ldr(temp, StackOperandFrom(receiver));
2044 __ Ldr(temp, HeapOperand(temp, class_offset));
2045 } else {
2046 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
2047 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002048 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00002049 // temp = temp->GetImtEntryAt(method_offset);
2050 __ Ldr(temp, HeapOperand(temp, method_offset));
2051 // lr = temp->GetEntryPoint();
2052 __ Ldr(lr, HeapOperand(temp, entry_point));
2053 // lr();
2054 __ Blr(lr);
2055 DCHECK(!codegen_->IsLeafMethod());
2056 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2057}
2058
2059void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002060 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2061 if (intrinsic.TryDispatch(invoke)) {
2062 return;
2063 }
2064
Alexandre Rames67555f72014-11-18 10:55:16 +00002065 HandleInvoke(invoke);
2066}
2067
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002068void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002069 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2070 if (intrinsic.TryDispatch(invoke)) {
2071 return;
2072 }
2073
Alexandre Rames67555f72014-11-18 10:55:16 +00002074 HandleInvoke(invoke);
2075}
2076
Andreas Gampe878d58c2015-01-15 23:24:00 -08002077static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
2078 if (invoke->GetLocations()->Intrinsified()) {
2079 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
2080 intrinsic.Dispatch(invoke);
2081 return true;
2082 }
2083 return false;
2084}
2085
2086void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
2087 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
2088 DCHECK(temp.Is(kArtMethodRegister));
Alexandre Rames5319def2014-10-23 10:03:10 +01002089 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08002090 invoke->GetDexMethodIndex() * kHeapRefSize;
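  // index_in_cache is the byte offset of this method's slot in the dex cache
  // resolved-methods array: the array data offset plus method index * reference size.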
Alexandre Rames5319def2014-10-23 10:03:10 +01002091
2092 // TODO: Implement all kinds of calls:
2093 // 1) boot -> boot
2094 // 2) app -> boot
2095 // 3) app -> app
2096 //
2097 // Currently we implement the app -> app logic, which looks up in the resolve cache.
2098
Nicolas Geoffray0a299b92015-01-29 11:39:44 +00002099 // temp = method;
2100 LoadCurrentMethod(temp);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002101 if (!invoke->IsRecursive()) {
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002102 // temp = temp->dex_cache_resolved_methods_;
2103 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
2104 // temp = temp[index_in_cache];
2105 __ Ldr(temp, HeapOperand(temp, index_in_cache));
2106 // lr = temp->entry_point_from_quick_compiled_code_;
2107 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2108 kArm64WordSize)));
2109 // lr();
2110 __ Blr(lr);
2111 } else {
2112 __ Bl(&frame_entry_label_);
2113 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002114
Andreas Gampe878d58c2015-01-15 23:24:00 -08002115 DCHECK(!IsLeafMethod());
2116}
2117
2118void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
2119 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2120 return;
2121 }
2122
Alexandre Ramesd921d642015-04-16 15:07:16 +01002123 BlockPoolsScope block_pools(GetVIXLAssembler());
Andreas Gampe878d58c2015-01-15 23:24:00 -08002124 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2125 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002126 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002127}
2128
2129void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002130 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2131 return;
2132 }
2133
Alexandre Rames5319def2014-10-23 10:03:10 +01002134 LocationSummary* locations = invoke->GetLocations();
2135 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002136 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002137 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2138 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2139 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002140 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002141
Alexandre Ramesd921d642015-04-16 15:07:16 +01002142 BlockPoolsScope block_pools(GetVIXLAssembler());
2143
Alexandre Rames5319def2014-10-23 10:03:10 +01002144 // temp = object->GetClass();
2145 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002146 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2147 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002148 } else {
2149 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002150 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002151 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002152 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01002153 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002154 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002155 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002156 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002157 // lr();
2158 __ Blr(lr);
2159 DCHECK(!codegen_->IsLeafMethod());
2160 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2161}
2162
Alexandre Rames67555f72014-11-18 10:55:16 +00002163void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2164 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2165 : LocationSummary::kNoCall;
2166 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2167 locations->SetOut(Location::RequiresRegister());
2168}
2169
2170void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2171 Register out = OutputRegister(cls);
2172 if (cls->IsReferrersClass()) {
2173 DCHECK(!cls->CanCallRuntime());
2174 DCHECK(!cls->MustGenerateClinitCheck());
2175 codegen_->LoadCurrentMethod(out);
2176 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2177 } else {
2178 DCHECK(cls->CanCallRuntime());
2179 codegen_->LoadCurrentMethod(out);
2180 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002181 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002182
2183 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2184 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2185 codegen_->AddSlowPath(slow_path);
2186 __ Cbz(out, slow_path->GetEntryLabel());
2187 if (cls->MustGenerateClinitCheck()) {
2188 GenerateClassInitializationCheck(slow_path, out);
2189 } else {
2190 __ Bind(slow_path->GetExitLabel());
2191 }
2192 }
2193}
2194
2195void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2196 LocationSummary* locations =
2197 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2198 locations->SetOut(Location::RequiresRegister());
2199}
2200
2201void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2202 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2203 __ Ldr(OutputRegister(instruction), exception);
2204 __ Str(wzr, exception);
2205}
2206
Alexandre Rames5319def2014-10-23 10:03:10 +01002207void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2208 load->SetLocations(nullptr);
2209}
2210
2211void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2212 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002213 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002214}
2215
Alexandre Rames67555f72014-11-18 10:55:16 +00002216void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2217 LocationSummary* locations =
2218 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2219 locations->SetOut(Location::RequiresRegister());
2220}
2221
void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
  codegen_->AddSlowPath(slow_path);

  Register out = OutputRegister(load);
  codegen_->LoadCurrentMethod(out);
  __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
  __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
  __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  __ Cbz(out, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

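// Monitor enter and exit are not inlined: both are delegated to the pLockObject/pUnlockObject
// entrypoints, with the object already placed in the first runtime-call argument register.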
void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
          ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

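// Integer multiplications map directly to MUL and floating-point ones to FMUL; no runtime
// call is needed on either path.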
void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}

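// Array allocation is a runtime call: the type index is materialized in w0, the current method
// in w2, and the array length arrives in the second argument register as arranged by the
// locations builder above.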
void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w2));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
                       void*, uint32_t, int32_t, mirror::ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}

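// Same pattern as VisitNewArray, minus the length argument: type index in w0, current method
// in w1, then a call to the allocation entrypoint attached to the instruction.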
void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
      instruction,
      instruction->GetDexPc(),
      nullptr);
  CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

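// Booleans are materialized as 0 or 1, so flipping the low bit with EOR is enough to negate.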
void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

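// Implicit null checks rely on a faulting load: reading from offset 0 of a null reference
// traps, and the runtime turns the fault into a NullPointerException. Only the PC of the
// load needs to be recorded here.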
void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}

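// Explicit variant: a compare-and-branch-on-zero jumps straight to the null check slow path.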
void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

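// Stack-passed parameters live in the caller's frame, so their slot indices are rebased by
// the size of the current frame.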
void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

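// Integer remainder is computed inline as
//   temp = dividend / divisor; output = dividend - temp * divisor;
// using SDIV followed by MSUB. Floating-point remainder goes through the fmodf/fmod
// entrypoints instead.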
void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register dividend = InputRegisterAt(rem, 0);
      Register divisor = InputRegisterAt(rem, 1);
      Register output = OutputRegister(rem);
      Register temp = temps.AcquireSameSizeAs(output);

      __ Sdiv(temp, dividend, divisor);
      __ Msub(output, temp, divisor, dividend);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

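// Suspend checks attached to a loop header, or to the entry block when it ends with a goto,
// are emitted together with the corresponding branch; only the remaining cases generate code
// here.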
void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

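// Integer-to-integer conversions are bit-field extracts: UBFX (zero-extension) when char
// semantics apply, SBFX (sign-extension) otherwise. Integer/FP and FP/FP conversions map to
// SCVTF, FCVTZS and FCVT respectively.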
void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art