blob: 31900d536ae9b9e5ccf5a9ca31b1b35b5864e266 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "code_generator_arm64.h"
18
Serban Constantinescu579885a2015-02-22 20:51:33 +000019#include "arch/arm64/instruction_set_features_arm64.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method.h"
Zheng Xuc6667102015-05-15 16:08:45 +080021#include "code_generator_utils.h"
Calin Juravlee6f49b42015-09-17 14:04:33 +000022#include "common_arm64.h"
Vladimir Marko58155012015-08-19 12:49:41 +000023#include "compiled_method.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010024#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe1cc7dba2014-12-17 18:43:01 -080025#include "entrypoints/quick/quick_entrypoints_enum.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010026#include "gc/accounting/card_table.h"
Andreas Gampe878d58c2015-01-15 23:24:00 -080027#include "intrinsics.h"
28#include "intrinsics_arm64.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010029#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070030#include "mirror/class-inl.h"
Calin Juravlecd6dffe2015-01-08 17:35:35 +000031#include "offsets.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010032#include "thread.h"
33#include "utils/arm64/assembler_arm64.h"
34#include "utils/assembler.h"
35#include "utils/stack_checks.h"
36
37
38using namespace vixl; // NOLINT(build/namespaces)
39
40#ifdef __
41#error "ARM64 Codegen VIXL macro-assembler macro already defined."
42#endif
43
Alexandre Rames5319def2014-10-23 10:03:10 +010044namespace art {
45
46namespace arm64 {
47
Andreas Gampe878d58c2015-01-15 23:24:00 -080048using helpers::CPURegisterFrom;
49using helpers::DRegisterFrom;
50using helpers::FPRegisterFrom;
51using helpers::HeapOperand;
52using helpers::HeapOperandFrom;
53using helpers::InputCPURegisterAt;
54using helpers::InputFPRegisterAt;
55using helpers::InputRegisterAt;
56using helpers::InputOperandAt;
57using helpers::Int64ConstantFrom;
Andreas Gampe878d58c2015-01-15 23:24:00 -080058using helpers::LocationFrom;
59using helpers::OperandFromMemOperand;
60using helpers::OutputCPURegister;
61using helpers::OutputFPRegister;
62using helpers::OutputRegister;
63using helpers::RegisterFrom;
64using helpers::StackOperandFrom;
65using helpers::VIXLRegCodeFromART;
66using helpers::WRegisterFrom;
67using helpers::XRegisterFrom;
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +000068using helpers::ARM64EncodableConstantOrRegister;
Zheng Xuda403092015-04-24 17:35:39 +080069using helpers::ArtVixlRegCodeCoherentForRegSet;
Andreas Gampe878d58c2015-01-15 23:24:00 -080070
// Stack offset (relative to SP) at which the current ArtMethod* is stored on frame entry.
static constexpr int kCurrentMethodStackOffset = 0;
72
Alexandre Rames5319def2014-10-23 10:03:10 +010073inline Condition ARM64Condition(IfCondition cond) {
74 switch (cond) {
75 case kCondEQ: return eq;
76 case kCondNE: return ne;
77 case kCondLT: return lt;
78 case kCondLE: return le;
79 case kCondGT: return gt;
80 case kCondGE: return ge;
Alexandre Rames5319def2014-10-23 10:03:10 +010081 }
Roland Levillain7f63c522015-07-13 15:54:55 +000082 LOG(FATAL) << "Unreachable";
83 UNREACHABLE();
Alexandre Rames5319def2014-10-23 10:03:10 +010084}
85
Alexandre Ramesa89086e2014-11-07 17:13:25 +000086Location ARM64ReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +000087 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
88 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
89 // but we use the exact registers for clarity.
90 if (return_type == Primitive::kPrimFloat) {
91 return LocationFrom(s0);
92 } else if (return_type == Primitive::kPrimDouble) {
93 return LocationFrom(d0);
94 } else if (return_type == Primitive::kPrimLong) {
95 return LocationFrom(x0);
Nicolas Geoffray925e5622015-06-03 12:23:32 +010096 } else if (return_type == Primitive::kPrimVoid) {
97 return Location::NoLocation();
Alexandre Ramesa89086e2014-11-07 17:13:25 +000098 } else {
99 return LocationFrom(w0);
100 }
101}
102
// The runtime calling convention returns values in the same registers as the
// managed ABI, so delegate to the shared helper.
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}
106
Alexandre Rames67555f72014-11-18 10:55:16 +0000107#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
108#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
Alexandre Rames5319def2014-10-23 10:03:10 +0100109
// Emit the stores (is_save == true) or loads (is_save == false) that spill or
// reload the caller-save live registers of a slow path, starting at
// `spill_offset` from the stack pointer. Callee-saved registers are excluded:
// they were already preserved by the frame entry.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  // ART and VIXL register numbering must agree for the list arithmetic below.
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  // Only caller-save registers need to be handled here.
  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  // Largest offset an STP/LDP in the sequence below will have to encode.
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the load/store-pair immediate field, use an
    // alternate register to compute the base address (the floating point
    // registers' spill base address), and rebase the offsets on it.
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  // Core registers are spilled first, floating point registers right after them.
  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
155
// Record the stack slot assigned to each live caller-save register (so the
// stack maps know where object references live during the runtime call), then
// emit the actual spill code via SaveRestoreLiveRegistersHelper.
void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  // Floating point registers are assigned the slots following the core ones;
  // this mirrors the spill order in SaveRestoreLiveRegistersHelper.
  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}
185
// Reload the caller-save live registers spilled by SaveLiveRegisters, from the
// same slow-path stack slots.
void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}
191
// Slow path for HBoundsCheck: moves the offending index and the array length
// into the runtime calling convention registers and calls pThrowArrayBounds.
// The runtime call throws, so this path never returns to compiled code.
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The entrypoint always throws; execution cannot fall through.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};
225
// Slow path for HDivZeroCheck: calls pThrowDivZero to raise ArithmeticException.
// The runtime call throws, so this path never returns to compiled code.
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  // The entrypoint always throws; execution cannot fall through.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};
250
// Slow path shared by HLoadClass and HClinitCheck: calls the runtime to
// resolve a type (pInitializeType) or to resolve it and run its static
// initializer (pInitializeStaticStorage), then moves the resulting class
// object into the instruction's output location, if any.
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index of the class to resolve as the sole argument.
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};
309
// Slow path for HLoadString: calls pResolveString with the string index and
// moves the resolved java.lang.String into the instruction's output register.
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be clobbered by the register-save machinery.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the string index as the sole argument.
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};
341
// Slow path for HNullCheck: calls pThrowNullPointer to raise
// NullPointerException. The runtime call throws, so this path never returns.
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The entrypoint always throws; execution cannot fall through.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};
367
// Slow path for HSuspendCheck: calls pTestSuspend so the thread can service a
// suspension request (GC, debugger, ...), then branches back either to
// `successor_` or to the return label at the original check site.
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call may trigger a GC, so all live registers must be saved.
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};
409
Alexandre Rames67555f72014-11-18 10:55:16 +0000410class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
411 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000412 TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
413 : instruction_(instruction), is_fatal_(is_fatal) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000414
415 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000416 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100417 Location class_to_check = locations->InAt(1);
418 Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
419 : locations->Out();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000420 DCHECK(instruction_->IsCheckCast()
421 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
422 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100423 uint32_t dex_pc = instruction_->GetDexPc();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000424
Alexandre Rames67555f72014-11-18 10:55:16 +0000425 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000426
427 if (instruction_->IsCheckCast()) {
428 // The codegen for the instruction overwrites `temp`, so put it back in place.
429 Register obj = InputRegisterAt(instruction_, 0);
430 Register temp = WRegisterFrom(locations->GetTemp(0));
431 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
432 __ Ldr(temp, HeapOperand(obj, class_offset));
433 arm64_codegen->GetAssembler()->MaybeUnpoisonHeapReference(temp);
434 }
435
436 if (!is_fatal_) {
437 SaveLiveRegisters(codegen, locations);
438 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000439
440 // We're moving two locations to locations that could overlap, so we need a parallel
441 // move resolver.
442 InvokeRuntimeCallingConvention calling_convention;
443 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100444 class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
445 object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000446
447 if (instruction_->IsInstanceOf()) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +0000448 arm64_codegen->InvokeRuntime(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100449 QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
Alexandre Rames3e69f162014-12-10 10:36:50 +0000450 Primitive::Type ret_type = instruction_->GetType();
451 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
452 arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800453 CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
454 const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000455 } else {
456 DCHECK(instruction_->IsCheckCast());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100457 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800458 CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000459 }
460
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000461 if (!is_fatal_) {
462 RestoreLiveRegisters(codegen, locations);
463 __ B(GetExitLabel());
464 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000465 }
466
Alexandre Rames9931f312015-06-19 14:47:01 +0100467 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000468 bool IsFatal() const { return is_fatal_; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100469
Alexandre Rames67555f72014-11-18 10:55:16 +0000470 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000471 HInstruction* const instruction_;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000472 const bool is_fatal_;
Alexandre Rames3e69f162014-12-10 10:36:50 +0000473
Alexandre Rames67555f72014-11-18 10:55:16 +0000474 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
475};
476
// Slow path for HDeoptimize: calls pDeoptimize, which transfers execution to
// the interpreter. The runtime call does not return to this compiled code.
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // NOTE(review): unlike the other slow paths here, this one does not follow
    // the call with a CheckEntrypointTypes<kQuickDeoptimize, ...>() — confirm
    // whether that check was intentionally omitted.
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};
498
Alexandre Rames5319def2014-10-23 10:03:10 +0100499#undef __
500
Roland Levillain2d27c8e2015-04-28 15:48:45 +0100501Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +0100502 Location next_location;
503 if (type == Primitive::kPrimVoid) {
504 LOG(FATAL) << "Unreachable type " << type;
505 }
506
Alexandre Rames542361f2015-01-29 16:57:31 +0000507 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +0100508 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
509 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +0000510 } else if (!Primitive::IsFloatingPointType(type) &&
511 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000512 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
513 } else {
514 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +0000515 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
516 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100517 }
518
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000519 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +0000520 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100521 return next_location;
522}
523
// The current ArtMethod* is passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}
527
// Construct the ARM64 code generator: registers the allocatable register
// counts and callee-save lists with the base CodeGenerator, and sets up the
// arena-backed containers used for literals and linker patches.
CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      uint64_literals_(std::less<uint64_t>(), graph->GetArena()->Adapter()),
      method_patches_(MethodReferenceComparator(), graph->GetArena()->Adapter()),
      call_patches_(MethodReferenceComparator(), graph->GetArena()->Adapter()),
      relative_call_patches_(graph->GetArena()->Adapter()),
      pc_rel_dex_cache_patches_(graph->GetArena()->Adapter()) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}
Alexandre Rames5319def2014-10-23 10:03:10 +0100553
Alexandre Rames67555f72014-11-18 10:55:16 +0000554#undef __
555#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100556
// Finish code generation: flush VIXL's pending literal pool into the code
// stream before the base class copies the final code to the allocator.
void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}
563
// Open the VIXL scratch register pool for the duration of parallel move
// emission; AllocateScratchLocationFor draws its temporaries from it.
void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}
578
// Close the VIXL scratch register pool opened in PrepareForEmitNativeCode.
void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}
582
583Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
584 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
585 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
586 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
587 Location scratch = GetScratchLocation(kind);
588 if (!scratch.Equals(Location::NoLocation())) {
589 return scratch;
590 }
591 // Allocate from VIXL temp registers.
592 if (kind == Location::kRegister) {
593 scratch = LocationFrom(vixl_temps_.AcquireX());
594 } else {
595 DCHECK(kind == Location::kFpuRegister);
596 scratch = LocationFrom(vixl_temps_.AcquireD());
597 }
598 AddScratchLocation(scratch);
599 return scratch;
600}
601
602void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
603 if (loc.IsRegister()) {
604 vixl_temps_.Release(XRegisterFrom(loc));
605 } else {
606 DCHECK(loc.IsFpuRegister());
607 vixl_temps_.Release(DRegisterFrom(loc));
608 }
609 RemoveScratchLocation(loc);
610}
611
Alexandre Rames3e69f162014-12-10 10:36:50 +0000612void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100613 DCHECK_LT(index, moves_.size());
614 MoveOperands* move = moves_[index];
Alexandre Rames3e69f162014-12-10 10:36:50 +0000615 codegen_->MoveLocation(move->GetDestination(), move->GetSource());
616}
617
// Emits the method prologue: optional implicit stack-overflow probe, frame
// allocation (which also stores the ArtMethod* at sp[0]), and callee-save
// register spills. CFI is updated to describe the new frame.
void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    // Probe the lowest address the method may touch; a fault here is turned
    // into a StackOverflowError by the runtime's fault handler.
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]        : lr.
    //   ...                       : other preserved core registers.
    //   ...                       : other preserved fp registers.
    //   ...                       : reserved frame space.
    //   sp[0]                     : current method.
    // Pre-indexed store allocates the whole frame and saves the method in one go.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
        frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
        frame_size - FrameEntrySpillSize());
  }
}
649
// Emits the method epilogue: restores callee-saved registers (FP first,
// mirroring the reverse of the prologue spill order), releases the frame and
// returns. CFI state is remembered/restored so later code keeps the
// in-frame description.
void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
        frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
        frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}
666
Zheng Xuda403092015-04-24 17:35:39 +0800667vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
668 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
669 return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
670 core_spill_mask_);
671}
672
673vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
674 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
675 GetNumberOfFloatingPointRegisters()));
676 return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
677 fpu_spill_mask_);
678}
679
Alexandre Rames5319def2014-10-23 10:03:10 +0100680void CodeGeneratorARM64::Bind(HBasicBlock* block) {
681 __ Bind(GetLabelOf(block));
682}
683
// Moves the value produced by `instruction` into `location`, dispatching on
// the kind of producer (constant, temporary, local load, current method,
// or regular instruction output). Used by the baseline code generator.
void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsFakeString()) {
    // The fake string is an alias for null.
    DCHECK(IsBaseline());
    instruction = locations->Out().GetConstant();
    DCHECK(instruction->IsNullConstant()) << instruction->DebugName();
  }

  if (instruction->IsCurrentMethod()) {
    // The current method is always spilled at a fixed stack offset.
    MoveLocation(location, Location::DoubleStackSlot(kCurrentMethodStackOffset));
  } else if (locations != nullptr && locations->Out().Equals(location)) {
    // The value already lives in the requested location; nothing to do.
    return;
  } else if (instruction->IsIntConstant()
                 || instruction->IsLongConstant()
                 || instruction->IsNullConstant()) {
    int64_t value = GetInt64ValueOf(instruction->AsConstant());
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      // 32-bit constants (int/null) must target a W register, longs an X register.
      DCHECK(((instruction->IsIntConstant() || instruction->IsNullConstant()) && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      // Materialize the constant in a scratch register sized to the constant,
      // then store it to the stack slot.
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = (instruction->IsIntConstant() || instruction->IsNullConstant())
          ? temps.AcquireW()
          : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Primitive::Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    // General case: move from the instruction's output location.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}
736
Calin Juravle175dc732015-08-25 15:42:32 +0100737void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
738 DCHECK(location.IsRegister());
739 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
740}
741
Alexandre Rames5319def2014-10-23 10:03:10 +0100742Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
743 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000744
Alexandre Rames5319def2014-10-23 10:03:10 +0100745 switch (type) {
746 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000747 case Primitive::kPrimInt:
748 case Primitive::kPrimFloat:
749 return Location::StackSlot(GetStackSlot(load->GetLocal()));
750
751 case Primitive::kPrimLong:
752 case Primitive::kPrimDouble:
753 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
754
Alexandre Rames5319def2014-10-23 10:03:10 +0100755 case Primitive::kPrimBoolean:
756 case Primitive::kPrimByte:
757 case Primitive::kPrimChar:
758 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +0100759 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +0100760 LOG(FATAL) << "Unexpected type " << type;
761 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000762
Alexandre Rames5319def2014-10-23 10:03:10 +0100763 LOG(FATAL) << "Unreachable";
764 return Location::NoLocation();
765}
766
Nicolas Geoffray07276db2015-05-18 14:22:09 +0100767void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +0000768 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100769 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000770 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +0100771 vixl::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +0100772 if (value_can_be_null) {
773 __ Cbz(value, &done);
774 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100775 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
776 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000777 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +0100778 if (value_can_be_null) {
779 __ Bind(&done);
780 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100781}
782
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000783void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline) const {
784 // Blocked core registers:
785 // lr : Runtime reserved.
786 // tr : Runtime reserved.
787 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
788 // ip1 : VIXL core temp.
789 // ip0 : VIXL core temp.
790 //
791 // Blocked fp registers:
792 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +0100793 CPURegList reserved_core_registers = vixl_reserved_core_registers;
794 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +0100795 while (!reserved_core_registers.IsEmpty()) {
796 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
797 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000798
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000799 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +0800800 while (!reserved_fp_registers.IsEmpty()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000801 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
802 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000803
804 if (is_baseline) {
805 CPURegList reserved_core_baseline_registers = callee_saved_core_registers;
806 while (!reserved_core_baseline_registers.IsEmpty()) {
807 blocked_core_registers_[reserved_core_baseline_registers.PopLowestIndex().code()] = true;
808 }
809
810 CPURegList reserved_fp_baseline_registers = callee_saved_fp_registers;
811 while (!reserved_fp_baseline_registers.IsEmpty()) {
812 blocked_fpu_registers_[reserved_fp_baseline_registers.PopLowestIndex().code()] = true;
813 }
814 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100815}
816
817Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
818 if (type == Primitive::kPrimVoid) {
819 LOG(FATAL) << "Unreachable type " << type;
820 }
821
Alexandre Rames542361f2015-01-29 16:57:31 +0000822 if (Primitive::IsFloatingPointType(type)) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000823 ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
824 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100825 return Location::FpuRegisterLocation(reg);
826 } else {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000827 ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
828 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100829 return Location::RegisterLocation(reg);
830 }
831}
832
Alexandre Rames3e69f162014-12-10 10:36:50 +0000833size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
834 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
835 __ Str(reg, MemOperand(sp, stack_index));
836 return kArm64WordSize;
837}
838
839size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
840 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
841 __ Ldr(reg, MemOperand(sp, stack_index));
842 return kArm64WordSize;
843}
844
845size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
846 FPRegister reg = FPRegister(reg_id, kDRegSize);
847 __ Str(reg, MemOperand(sp, stack_index));
848 return kArm64WordSize;
849}
850
851size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
852 FPRegister reg = FPRegister(reg_id, kDRegSize);
853 __ Ldr(reg, MemOperand(sp, stack_index));
854 return kArm64WordSize;
855}
856
Alexandre Rames5319def2014-10-23 10:03:10 +0100857void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100858 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +0100859}
860
861void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100862 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +0100863}
864
Alexandre Rames67555f72014-11-18 10:55:16 +0000865void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000866 if (constant->IsIntConstant()) {
867 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
868 } else if (constant->IsLongConstant()) {
869 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
870 } else if (constant->IsNullConstant()) {
871 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +0000872 } else if (constant->IsFloatConstant()) {
873 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
874 } else {
875 DCHECK(constant->IsDoubleConstant());
876 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
877 }
878}
879
Alexandre Rames3e69f162014-12-10 10:36:50 +0000880
881static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
882 DCHECK(constant.IsConstant());
883 HConstant* cst = constant.GetConstant();
884 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +0000885 // Null is mapped to a core W register, which we associate with kPrimInt.
886 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +0000887 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
888 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
889 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
890}
891
// Moves a value between two arbitrary locations (register, FP register,
// stack slot, double stack slot, constant). `type` selects 32- vs 64-bit
// moves; when it is kPrimVoid the move is inferred from the two locations.
void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // checks the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    // The register class must match the (possibly inferred) type.
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        // Infer width from the destination slot, register class from the source.
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type)) << source << " " << type;
      // Materialize the constant in a scratch register sized to the constant
      // kind, then store it to the stack slot.
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      // Stack-to-stack move, staged through a scratch FP register.
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}
978
// Emits a plain (non-ordered) load of `type` from `src` into `dst`, choosing
// the width- and sign-correct load instruction per primitive type.
void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);  // Zero-extending byte load.
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);  // Sign-extending byte load.
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);  // Sign-extending half-word load.
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);  // Zero-extending half-word load (chars are unsigned).
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Word-or-larger loads: the register width must match the type width.
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
1007
// Emits a load with acquire semantics (Ldar and friends) of the
// instruction's type from `src` into `dst`. Records an implicit null check
// on the faulting (load) instruction where applicable. FP loads are staged
// through a core register since there is no FP load-acquire.
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  // Ldar only takes a base register, so no pre/post-indexed operands.
  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      // Ldarb zero-extends; sign-extend the byte afterwards.
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      // Ldarh zero-extends; sign-extend the half-word afterwards.
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      // No FP load-acquire exists: load into a core scratch, then Fmov across.
      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
1064
// Emits a plain (non-ordered) store of `type` from `src` to `dst`, choosing
// the width-correct store instruction per primitive type.
void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);  // Byte store.
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);  // Half-word store.
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Word-or-larger stores: the register width must match the type width.
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
1089
// Emits a store with release semantics (Stlr and friends) of `type` from
// `src` to `dst`. FP stores are staged through a core register since there
// is no FP store-release.
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  // Stlr only takes a base register, so no pre/post-indexed operands.
  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      // No FP store-release exists: Fmov into a core scratch, then Stlr.
      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
1132
Calin Juravle175dc732015-08-25 15:42:32 +01001133void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1134 HInstruction* instruction,
1135 uint32_t dex_pc,
1136 SlowPathCode* slow_path) {
1137 InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
1138 instruction,
1139 dex_pc,
1140 slow_path);
1141}
1142
// Calls a runtime entrypoint stored at `entry_point_offset` in the Thread
// object: loads the target into lr, branches to it, and records the PC so
// the runtime can map the return address back to `dex_pc`.
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  // Keep literal pools from being emitted between the call and RecordPcInfo.
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}
1153
// Emits a check that the class in `class_reg` is initialized, branching to
// `slow_path` otherwise. The status load is ordered (acquire, or load+Dmb on
// cores that do not prefer acquire/release) so that initialized-object state
// is visible before use.
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (use_acquire_release) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    // Read barrier substitute for the acquire load above.
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}
Alexandre Rames5319def2014-10-23 10:03:10 +01001176
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001177void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
1178 BarrierType type = BarrierAll;
1179
1180 switch (kind) {
1181 case MemBarrierKind::kAnyAny:
1182 case MemBarrierKind::kAnyStore: {
1183 type = BarrierAll;
1184 break;
1185 }
1186 case MemBarrierKind::kLoadAny: {
1187 type = BarrierReads;
1188 break;
1189 }
1190 case MemBarrierKind::kStoreStore: {
1191 type = BarrierWrites;
1192 break;
1193 }
1194 default:
1195 LOG(FATAL) << "Unexpected memory barrier " << kind;
1196 }
1197 __ Dmb(InnerShareable, type);
1198}
1199
// Emits a suspend check: tests the current thread's flags and enters a slow
// path when any flag is set. The slow path object is created lazily, stored
// on the HSuspendCheck, and reused on subsequent visits.
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      // With a successor this is a loop back-edge suspend check.
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A reused slow path must target the same successor block.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  // Load the thread flags (a halfword) from the thread register.
  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    // Fall through when no flags are set; the slow path returns here.
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1229
// The instruction visitor shares the code generator's assembler so that all
// visitors emit into the same code buffer.
InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1235
// Machinery for IR instructions this backend does not implement: each entry
// in FOR_EACH_UNIMPLEMENTED_INSTRUCTION gets stub visitors that emit a BRK
// with a recognizable code. The list is currently empty.
#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

// Builds a distinct break-code enumerator name for each instruction.
#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

// The code-generator stub traps with the instruction's break code; the
// locations-builder stub accepts any output location.
#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
    UNUSED(instr); \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
  } \
  void LocationsBuilderARM64::Visit##name(H##name* instr) { \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any()); \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001263
Alexandre Rames67555f72014-11-18 10:55:16 +00001264void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001265 DCHECK_EQ(instr->InputCount(), 2U);
1266 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1267 Primitive::Type type = instr->GetResultType();
1268 switch (type) {
1269 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001270 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001271 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001272 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001273 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001274 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001275
1276 case Primitive::kPrimFloat:
1277 case Primitive::kPrimDouble:
1278 locations->SetInAt(0, Location::RequiresFpuRegister());
1279 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001280 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001281 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001282
Alexandre Rames5319def2014-10-23 10:03:10 +01001283 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001284 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001285 }
1286}
1287
Alexandre Rames09a99962015-04-15 11:47:56 +01001288void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
1289 LocationSummary* locations =
1290 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1291 locations->SetInAt(0, Location::RequiresRegister());
1292 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1293 locations->SetOut(Location::RequiresFpuRegister());
1294 } else {
1295 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1296 }
1297}
1298
// Loads an instance or static field. Volatile reads use either an acquire
// load or a plain load followed by a full barrier, depending on what the
// instruction set features prefer. Reference results may need unpoisoning.
void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());

  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  if (field_info.IsVolatile()) {
    if (use_acquire_release) {
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(field_type, OutputCPURegister(instruction), field);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (field_type == Primitive::kPrimNot) {
    // References may be stored poisoned in the heap; unpoison the result.
    GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
  }
}
1327
1328void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1329 LocationSummary* locations =
1330 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1331 locations->SetInAt(0, Location::RequiresRegister());
1332 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1333 locations->SetInAt(1, Location::RequiresFpuRegister());
1334 } else {
1335 locations->SetInAt(1, Location::RequiresRegister());
1336 }
1337}
1338
// Stores to an instance or static field. When heap poisoning is enabled,
// reference values are first copied and poisoned in a scratch register;
// volatile stores use either a release store or explicit barriers around a
// plain store; finally a GC card is marked for stores that need a write
// barrier. `value_can_be_null` is forwarded to MarkGCCard.
void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();
  bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      if (use_acquire_release) {
        // StoreRelease provides the ordering required by a volatile write.
        codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Bracket the plain store: any->store barrier before, any->any after.
        GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
        codegen_->Store(field_type, source, HeapOperand(obj, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
      }
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}
1385
Alexandre Rames67555f72014-11-18 10:55:16 +00001386void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001387 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001388
1389 switch (type) {
1390 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001391 case Primitive::kPrimLong: {
1392 Register dst = OutputRegister(instr);
1393 Register lhs = InputRegisterAt(instr, 0);
1394 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001395 if (instr->IsAdd()) {
1396 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001397 } else if (instr->IsAnd()) {
1398 __ And(dst, lhs, rhs);
1399 } else if (instr->IsOr()) {
1400 __ Orr(dst, lhs, rhs);
1401 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001402 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001403 } else {
1404 DCHECK(instr->IsXor());
1405 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001406 }
1407 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001408 }
1409 case Primitive::kPrimFloat:
1410 case Primitive::kPrimDouble: {
1411 FPRegister dst = OutputFPRegister(instr);
1412 FPRegister lhs = InputFPRegisterAt(instr, 0);
1413 FPRegister rhs = InputFPRegisterAt(instr, 1);
1414 if (instr->IsAdd()) {
1415 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001416 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001417 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001418 } else {
1419 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001420 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001421 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001422 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001423 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001424 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001425 }
1426}
1427
Serban Constantinescu02164b32014-11-13 14:05:07 +00001428void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1429 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1430
1431 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1432 Primitive::Type type = instr->GetResultType();
1433 switch (type) {
1434 case Primitive::kPrimInt:
1435 case Primitive::kPrimLong: {
1436 locations->SetInAt(0, Location::RequiresRegister());
1437 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1438 locations->SetOut(Location::RequiresRegister());
1439 break;
1440 }
1441 default:
1442 LOG(FATAL) << "Unexpected shift type " << type;
1443 }
1444}
1445
1446void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1447 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1448
1449 Primitive::Type type = instr->GetType();
1450 switch (type) {
1451 case Primitive::kPrimInt:
1452 case Primitive::kPrimLong: {
1453 Register dst = OutputRegister(instr);
1454 Register lhs = InputRegisterAt(instr, 0);
1455 Operand rhs = InputOperandAt(instr, 1);
1456 if (rhs.IsImmediate()) {
1457 uint32_t shift_value = (type == Primitive::kPrimInt)
1458 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1459 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1460 if (instr->IsShl()) {
1461 __ Lsl(dst, lhs, shift_value);
1462 } else if (instr->IsShr()) {
1463 __ Asr(dst, lhs, shift_value);
1464 } else {
1465 __ Lsr(dst, lhs, shift_value);
1466 }
1467 } else {
1468 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1469
1470 if (instr->IsShl()) {
1471 __ Lsl(dst, lhs, rhs_reg);
1472 } else if (instr->IsShr()) {
1473 __ Asr(dst, lhs, rhs_reg);
1474 } else {
1475 __ Lsr(dst, lhs, rhs_reg);
1476 }
1477 }
1478 break;
1479 }
1480 default:
1481 LOG(FATAL) << "Unexpected shift operation type " << type;
1482 }
1483}
1484
void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  // Add uses the shared binary-operation location rules.
  HandleBinaryOp(instruction);
}
1488
void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  // Add uses the shared binary-operation code generation.
  HandleBinaryOp(instruction);
}
1492
void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  // And uses the shared binary-operation location rules.
  HandleBinaryOp(instruction);
}
1496
void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  // And uses the shared binary-operation code generation.
  HandleBinaryOp(instruction);
}
1500
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001501void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1502 LocationSummary* locations =
1503 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1504 locations->SetInAt(0, Location::RequiresRegister());
1505 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001506 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1507 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1508 } else {
1509 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1510 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001511}
1512
// Loads an array element: forms the address either with a constant offset or
// with a scaled register index, performs the load, and unpoisons reference
// elements when heap poisoning is enabled.
void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
  MemOperand source = HeapOperand(obj);
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  BlockPoolsScope block_pools(masm);

  if (index.IsConstant()) {
    // Constant index: fold it into the data offset.
    offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
    source = HeapOperand(obj, offset);
  } else {
    // Variable index: base + data offset into a temp, then a scaled register
    // offset addressing mode.
    Register temp = temps.AcquireSameSizeAs(obj);
    __ Add(temp, obj, offset);
    source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
  }

  codegen_->Load(type, OutputCPURegister(instruction), source);
  codegen_->MaybeRecordImplicitNullCheck(instruction);

  if (type == Primitive::kPrimNot) {
    GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
  }
}
1540
Alexandre Rames5319def2014-10-23 10:03:10 +01001541void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1542 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1543 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001544 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001545}
1546
// Loads the array's length field; the load itself can serve as the implicit
// null check of the array object.
void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
1553
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001554void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Alexandre Rames97833a02015-04-16 15:07:12 +01001555 if (instruction->NeedsTypeCheck()) {
1556 LocationSummary* locations =
1557 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001558 InvokeRuntimeCallingConvention calling_convention;
1559 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1560 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1561 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1562 } else {
Alexandre Rames97833a02015-04-16 15:07:12 +01001563 LocationSummary* locations =
1564 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001565 locations->SetInAt(0, Location::RequiresRegister());
1566 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001567 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1568 locations->SetInAt(2, Location::RequiresFpuRegister());
1569 } else {
1570 locations->SetInAt(2, Location::RequiresRegister());
1571 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001572 }
1573}
1574
// Stores an array element. Stores that need a type check are delegated to the
// pAputObject runtime entrypoint; otherwise the address is formed as in
// VisitArrayGet, references are poisoned when required, and a GC card is
// marked for stores that need a write barrier.
void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool needs_runtime_call = locations->WillCall();

  if (needs_runtime_call) {
    // Note: if heap poisoning is enabled, pAputObject takes cares
    // of poisoning the reference.
    codegen_->InvokeRuntime(
        QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
  } else {
    Register obj = InputRegisterAt(instruction, 0);
    CPURegister value = InputCPURegisterAt(instruction, 2);
    CPURegister source = value;
    Location index = locations->InAt(1);
    size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
    MemOperand destination = HeapOperand(obj);
    MacroAssembler* masm = GetVIXLAssembler();
    BlockPoolsScope block_pools(masm);
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);

      if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
        // Copy and poison the reference in a scratch register before storing.
        DCHECK(value.IsW());
        Register temp = temps.AcquireW();
        __ Mov(temp, value.W());
        GetAssembler()->PoisonHeapReference(temp.W());
        source = temp;
      }

      if (index.IsConstant()) {
        // Constant index: fold it into the data offset.
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(obj, offset);
      } else {
        // Variable index: base + data offset into a temp, then a scaled
        // register offset addressing mode.
        Register temp = temps.AcquireSameSizeAs(obj);
        __ Add(temp, obj, offset);
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      codegen_->Store(value_type, source, destination);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
      codegen_->MarkGCCard(obj, value.W(), instruction->GetValueCanBeNull());
    }
  }
}
1628
Alexandre Rames67555f72014-11-18 10:55:16 +00001629void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00001630 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
1631 ? LocationSummary::kCallOnSlowPath
1632 : LocationSummary::kNoCall;
1633 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00001634 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00001635 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00001636 if (instruction->HasUses()) {
1637 locations->SetOut(Location::SameAsFirstInput());
1638 }
1639}
1640
1641void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001642 BoundsCheckSlowPathARM64* slow_path =
1643 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00001644 codegen_->AddSlowPath(slow_path);
1645
1646 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1647 __ B(slow_path->GetEntryLabel(), hs);
1648}
1649
Alexandre Rames67555f72014-11-18 10:55:16 +00001650void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1651 LocationSummary* locations =
1652 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1653 locations->SetInAt(0, Location::RequiresRegister());
1654 if (check->HasUses()) {
1655 locations->SetOut(Location::SameAsFirstInput());
1656 }
1657}
1658
void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  // NOTE(review): the trailing `true` ctor argument presumably requests the
  // initialization check on the slow path — confirm against LoadClassSlowPathARM64.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}
1666
Roland Levillain7f63c522015-07-13 15:54:55 +00001667static bool IsFloatingPointZeroConstant(HInstruction* instruction) {
1668 return (instruction->IsFloatConstant() && (instruction->AsFloatConstant()->GetValue() == 0.0f))
1669 || (instruction->IsDoubleConstant() && (instruction->AsDoubleConstant()->GetValue() == 0.0));
1670}
1671
Serban Constantinescu02164b32014-11-13 14:05:07 +00001672void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001673 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001674 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1675 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001676 switch (in_type) {
1677 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001678 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001679 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001680 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1681 break;
1682 }
1683 case Primitive::kPrimFloat:
1684 case Primitive::kPrimDouble: {
1685 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00001686 locations->SetInAt(1,
1687 IsFloatingPointZeroConstant(compare->InputAt(1))
1688 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
1689 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00001690 locations->SetOut(Location::RequiresRegister());
1691 break;
1692 }
1693 default:
1694 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1695 }
1696}
1697
// Materializes the three-way comparison result:
//   0 if: left == right
//   1 if: left > right
//  -1 if: left < right
void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  switch (in_type) {
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);

      __ Cmp(left, right);
      // result = (left != right) ? 1 : 0, then negated when left < right.
      __ Cset(result, ne);
      __ Cneg(result, result, lt);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      FPRegister left = InputFPRegisterAt(compare, 0);
      if (compare->GetLocations()->InAt(1).IsConstant()) {
        DCHECK(IsFloatingPointZeroConstant(compare->GetLocations()->InAt(1).GetConstant()));
        // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
        __ Fcmp(left, 0.0);
      } else {
        __ Fcmp(left, InputFPRegisterAt(compare, 1));
      }
      // NOTE(review): Cset vs Csetm and the mi/gt negate condition determine
      // how unordered (NaN) operands map onto 1 or -1 according to the
      // compare's gt bias — verify against the HCompare bias semantics.
      if (compare->IsGtBias()) {
        __ Cset(result, ne);
      } else {
        __ Csetm(result, ne);
      }
      __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
1738
1739void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1740 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00001741
1742 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
1743 locations->SetInAt(0, Location::RequiresFpuRegister());
1744 locations->SetInAt(1,
1745 IsFloatingPointZeroConstant(instruction->InputAt(1))
1746 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
1747 : Location::RequiresFpuRegister());
1748 } else {
1749 // Integer cases.
1750 locations->SetInAt(0, Location::RequiresRegister());
1751 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
1752 }
1753
Alexandre Rames5319def2014-10-23 10:03:10 +01001754 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001755 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001756 }
1757}
1758
// Materializes the boolean value of a condition into its output register.
// Does nothing when the condition is consumed directly by a branch.
void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();
  Condition arm64_cond = ARM64Condition(if_cond);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    FPRegister lhs = InputFPRegisterAt(instruction, 0);
    if (locations->InAt(1).IsConstant()) {
      DCHECK(IsFloatingPointZeroConstant(locations->InAt(1).GetConstant()));
      // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
      __ Fcmp(lhs, 0.0);
    } else {
      __ Fcmp(lhs, InputFPRegisterAt(instruction, 1));
    }
    __ Cset(res, arm64_cond);
    // Fix up the result when either operand was NaN (FCMP sets "unordered").
    if (instruction->IsFPConditionTrueIfNaN()) {
      // res = IsUnordered(arm64_cond) ? 1 : res <=> res = IsNotUnordered(arm64_cond) ? res : 1
      __ Csel(res, res, Operand(1), vc);  // VC for "not unordered".
    } else if (instruction->IsFPConditionFalseIfNaN()) {
      // res = IsUnordered(arm64_cond) ? 0 : res <=> res = IsNotUnordered(arm64_cond) ? res : 0
      __ Csel(res, res, Operand(0), vc);  // VC for "not unordered".
    }
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, arm64_cond);
  }
}
1794
// All condition instructions share a single locations builder and a single
// code generator (VisitCondition); stamp out the Visit methods for each.
#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal) \
  M(NotEqual) \
  M(LessThan) \
  M(LessThanOrEqual) \
  M(GreaterThan) \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name) \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION
1808
Zheng Xuc6667102015-05-15 16:08:45 +08001809void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
1810 DCHECK(instruction->IsDiv() || instruction->IsRem());
1811
1812 LocationSummary* locations = instruction->GetLocations();
1813 Location second = locations->InAt(1);
1814 DCHECK(second.IsConstant());
1815
1816 Register out = OutputRegister(instruction);
1817 Register dividend = InputRegisterAt(instruction, 0);
1818 int64_t imm = Int64FromConstant(second.GetConstant());
1819 DCHECK(imm == 1 || imm == -1);
1820
1821 if (instruction->IsRem()) {
1822 __ Mov(out, 0);
1823 } else {
1824 if (imm == 1) {
1825 __ Mov(out, dividend);
1826 } else {
1827 __ Neg(out, dividend);
1828 }
1829 }
1830}
1831
void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  // Emits division/remainder by a constant +/-2^k using shifts only.
  // Signed division must round toward zero, so negative dividends need a
  // bias of (2^k - 1) added before the arithmetic shift.
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  // NOTE(review): std::abs(INT64_MIN) overflows (UB); presumably the
  // power-of-two dispatch in GenerateDivRemIntegral never routes that
  // divisor here -- confirm upstream filtering.
  uint64_t abs_imm = static_cast<uint64_t>(std::abs(imm));
  DCHECK(IsPowerOfTwo(abs_imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    // Select the biased dividend only when the dividend is negative, then
    // shift; negate the result for a negative divisor.
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    // Remainder: temp holds the rounding correction derived from the sign
    // bit; mask the biased dividend and subtract the correction back out.
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
1867
void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  // Emits division/remainder by an arbitrary non-zero, non-power-of-two
  // constant using the multiply-by-magic-number technique:
  //   quotient ~= high_bits(dividend * magic) >> shift, plus sign fixups.
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    // 32-bit: form the full 64-bit product and keep its upper half.
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  // Correct the high half when the divisor's and the magic number's signs
  // differ (the magic constant was computed for an unsigned multiply).
  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    // Add one when the quotient is negative by subtracting its sign bit.
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    // out = dividend - quotient * imm.
    __ Msub(out, temp, temp_imm, dividend);
  }
}
1918
1919void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
1920 DCHECK(instruction->IsDiv() || instruction->IsRem());
1921 Primitive::Type type = instruction->GetResultType();
1922 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
1923
1924 LocationSummary* locations = instruction->GetLocations();
1925 Register out = OutputRegister(instruction);
1926 Location second = locations->InAt(1);
1927
1928 if (second.IsConstant()) {
1929 int64_t imm = Int64FromConstant(second.GetConstant());
1930
1931 if (imm == 0) {
1932 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
1933 } else if (imm == 1 || imm == -1) {
1934 DivRemOneOrMinusOne(instruction);
1935 } else if (IsPowerOfTwo(std::abs(imm))) {
1936 DivRemByPowerOfTwo(instruction);
1937 } else {
1938 DCHECK(imm <= -2 || imm >= 2);
1939 GenerateDivRemWithAnyConstant(instruction);
1940 }
1941 } else {
1942 Register dividend = InputRegisterAt(instruction, 0);
1943 Register divisor = InputRegisterAt(instruction, 1);
1944 if (instruction->IsDiv()) {
1945 __ Sdiv(out, dividend, divisor);
1946 } else {
1947 UseScratchRegisterScope temps(GetVIXLAssembler());
1948 Register temp = temps.AcquireSameSizeAs(out);
1949 __ Sdiv(temp, dividend, divisor);
1950 __ Msub(out, temp, divisor, dividend);
1951 }
1952 }
1953}
1954
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001955void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1956 LocationSummary* locations =
1957 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1958 switch (div->GetResultType()) {
1959 case Primitive::kPrimInt:
1960 case Primitive::kPrimLong:
1961 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08001962 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001963 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1964 break;
1965
1966 case Primitive::kPrimFloat:
1967 case Primitive::kPrimDouble:
1968 locations->SetInAt(0, Location::RequiresFpuRegister());
1969 locations->SetInAt(1, Location::RequiresFpuRegister());
1970 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1971 break;
1972
1973 default:
1974 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1975 }
1976}
1977
1978void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1979 Primitive::Type type = div->GetResultType();
1980 switch (type) {
1981 case Primitive::kPrimInt:
1982 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08001983 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001984 break;
1985
1986 case Primitive::kPrimFloat:
1987 case Primitive::kPrimDouble:
1988 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1989 break;
1990
1991 default:
1992 LOG(FATAL) << "Unexpected div type " << type;
1993 }
1994}
1995
Alexandre Rames67555f72014-11-18 10:55:16 +00001996void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00001997 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
1998 ? LocationSummary::kCallOnSlowPath
1999 : LocationSummary::kNoCall;
2000 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002001 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2002 if (instruction->HasUses()) {
2003 locations->SetOut(Location::SameAsFirstInput());
2004 }
2005}
2006
2007void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2008 SlowPathCodeARM64* slow_path =
2009 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2010 codegen_->AddSlowPath(slow_path);
2011 Location value = instruction->GetLocations()->InAt(0);
2012
Alexandre Rames3e69f162014-12-10 10:36:50 +00002013 Primitive::Type type = instruction->GetType();
2014
Serguei Katkov8c0676c2015-08-03 13:55:33 +06002015 if ((type == Primitive::kPrimBoolean) || !Primitive::IsIntegralType(type)) {
2016 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002017 return;
2018 }
2019
Alexandre Rames67555f72014-11-18 10:55:16 +00002020 if (value.IsConstant()) {
2021 int64_t divisor = Int64ConstantFrom(value);
2022 if (divisor == 0) {
2023 __ B(slow_path->GetEntryLabel());
2024 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002025 // A division by a non-null constant is valid. We don't need to perform
2026 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002027 }
2028 } else {
2029 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2030 }
2031}
2032
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002033void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2034 LocationSummary* locations =
2035 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2036 locations->SetOut(Location::ConstantLocation(constant));
2037}
2038
2039void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2040 UNUSED(constant);
2041 // Will be generated at use site.
2042}
2043
void LocationsBuilderARM64::VisitExit(HExit* exit) {
  // The exit block generates no code, so it needs no locations.
  exit->SetLocations(nullptr);
}
2047
void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  // Nothing to emit for the exit block.
  UNUSED(exit);
}
2051
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002052void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2053 LocationSummary* locations =
2054 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2055 locations->SetOut(Location::ConstantLocation(constant));
2056}
2057
2058void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
2059 UNUSED(constant);
2060 // Will be generated at use site.
2061}
2062
David Brazdilfc6a86a2015-06-26 10:33:45 +00002063void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002064 DCHECK(!successor->IsExitBlock());
2065 HBasicBlock* block = got->GetBlock();
2066 HInstruction* previous = got->GetPrevious();
2067 HLoopInformation* info = block->GetLoopInformation();
2068
David Brazdil46e2a392015-03-16 17:31:52 +00002069 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002070 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2071 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2072 return;
2073 }
2074 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2075 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2076 }
2077 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002078 __ B(codegen_->GetLabelOf(successor));
2079 }
2080}
2081
void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  // Gotos are pure control flow; they need no locations.
  got->SetLocations(nullptr);
}
2085
void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  // Delegate to the shared goto emitter (suspend checks + branch).
  HandleGoto(got, got->GetSuccessor());
}
2089
void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // Try boundaries are pure control flow; they need no locations.
  try_boundary->SetLocations(nullptr);
}
2093
2094void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2095 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2096 if (!successor->IsExitBlock()) {
2097 HandleGoto(try_boundary, successor);
2098 }
2099}
2100
void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          vixl::Label* true_target,
                                                          vixl::Label* false_target,
                                                          vixl::Label* always_true_target) {
  // Emits the branch sequence for HIf/HDeoptimize. A null `false_target`
  // means the false successor falls through; `always_true_target` is null
  // when the true successor falls through.
  HInstruction* cond = instruction->InputAt(0);
  HCondition* condition = cond->AsCondition();

  if (cond->IsIntConstant()) {
    // Constant condition: emit at most one unconditional branch.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (always_true_target != nullptr) {
        __ B(always_true_target);
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(instruction, 0), true_target);
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Primitive::Type type =
        cond->IsCondition() ? cond->InputAt(0)->GetType() : Primitive::kPrimInt;

    if (Primitive::IsFloatingPointType(type)) {
      // FP compares don't like null false_targets.
      if (false_target == nullptr) {
        false_target = codegen_->GetLabelOf(instruction->AsIf()->IfFalseSuccessor());
      }
      FPRegister lhs = InputFPRegisterAt(condition, 0);
      if (condition->GetLocations()->InAt(1).IsConstant()) {
        DCHECK(IsFloatingPointZeroConstant(condition->GetLocations()->InAt(1).GetConstant()));
        // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
        __ Fcmp(lhs, 0.0);
      } else {
        __ Fcmp(lhs, InputFPRegisterAt(condition, 1));
      }
      // Route NaN operands (FCMP reports "unordered") to the proper target
      // before the ordered-condition branch below.
      if (condition->IsFPConditionTrueIfNaN()) {
        __ B(vs, true_target);  // VS for unordered.
      } else if (condition->IsFPConditionFalseIfNaN()) {
        __ B(vs, false_target);  // VS for unordered.
      }
      __ B(ARM64Condition(condition->GetCondition()), true_target);
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);
      Condition arm64_cond = ARM64Condition(condition->GetCondition());
      // Comparisons against an immediate zero (other than gt/le, which need
      // the full flags) can use the compact cbz/cbnz/tbz/tbnz forms.
      if ((arm64_cond != gt && arm64_cond != le) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, true_target);
            break;
          case ne:
            __ Cbnz(lhs, true_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, true_target);
      }
    }
  }
  if (false_target != nullptr) {
    __ B(false_target);
  }
}
2184
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002185void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2186 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
2187 HInstruction* cond = if_instr->InputAt(0);
2188 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
2189 locations->SetInAt(0, Location::RequiresRegister());
2190 }
2191}
2192
2193void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
2194 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
2195 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
2196 vixl::Label* always_true_target = true_target;
2197 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
2198 if_instr->IfTrueSuccessor())) {
2199 always_true_target = nullptr;
2200 }
2201 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
2202 if_instr->IfFalseSuccessor())) {
2203 false_target = nullptr;
2204 }
2205 GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
2206}
2207
2208void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2209 LocationSummary* locations = new (GetGraph()->GetArena())
2210 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
2211 HInstruction* cond = deoptimize->InputAt(0);
2212 DCHECK(cond->IsCondition());
2213 if (cond->AsCondition()->NeedsMaterialization()) {
2214 locations->SetInAt(0, Location::RequiresRegister());
2215 }
2216}
2217
2218void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2219 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
2220 DeoptimizationSlowPathARM64(deoptimize);
2221 codegen_->AddSlowPath(slow_path);
2222 vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
2223 GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
2224}
2225
void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field gets; see HandleFieldGet.
  HandleFieldGet(instruction);
}
2229
void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the common field-load emitter with this field's metadata.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2233
void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field sets; see HandleFieldSet.
  HandleFieldSet(instruction);
}
2237
2238void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002239 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01002240}
2241
Alexandre Rames67555f72014-11-18 10:55:16 +00002242void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00002243 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2244 switch (instruction->GetTypeCheckKind()) {
2245 case TypeCheckKind::kExactCheck:
2246 case TypeCheckKind::kAbstractClassCheck:
2247 case TypeCheckKind::kClassHierarchyCheck:
2248 case TypeCheckKind::kArrayObjectCheck:
2249 call_kind = LocationSummary::kNoCall;
2250 break;
2251 case TypeCheckKind::kInterfaceCheck:
2252 call_kind = LocationSummary::kCall;
2253 break;
2254 case TypeCheckKind::kArrayCheck:
2255 call_kind = LocationSummary::kCallOnSlowPath;
2256 break;
2257 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002258 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00002259 if (call_kind != LocationSummary::kCall) {
2260 locations->SetInAt(0, Location::RequiresRegister());
2261 locations->SetInAt(1, Location::RequiresRegister());
2262 // The out register is used as a temporary, so it overlaps with the inputs.
2263 // Note that TypeCheckSlowPathARM64 uses this register too.
2264 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2265 } else {
2266 InvokeRuntimeCallingConvention calling_convention;
2267 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(0)));
2268 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2269 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
2270 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002271}
2272
void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  // Emits `obj instanceof cls`, leaving 0/1 in `out`. The `zero` and `done`
  // labels are only bound if some path actually branched to them
  // (vixl::Label::IsLinked()), keeping the common sequence compact.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Register out = OutputRegister(instruction);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  // In case of an interface check, we put the object class into the object register.
  // This is safe, as the register is caller-save, and the object must be in another
  // register if it survives the runtime call.
  Register target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
      ? obj
      : out;
  __ Ldr(target, HeapOperand(obj.W(), class_offset));
  GetAssembler()->MaybeUnpoisonHeapReference(target);

  switch (instruction->GetTypeCheckKind()) {
    case TypeCheckKind::kExactCheck: {
      // A single class comparison decides the result.
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop, success;
      __ Bind(&loop);
      __ Ldr(out, HeapOperand(out, super_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      __ Ldr(out, HeapOperand(out, super_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(out);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non primitive array.
      __ Ldr(out, HeapOperand(out, component_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }
    case TypeCheckKind::kArrayCheck: {
      // Compare inline; anything else is resolved on the slow path.
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
    default: {
      // Fall back to the runtime; the result lands in the return register.
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                              instruction,
                              instruction->GetDexPc(),
                              nullptr);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
2403
2404void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
2405 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2406 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2407
2408 switch (instruction->GetTypeCheckKind()) {
2409 case TypeCheckKind::kExactCheck:
2410 case TypeCheckKind::kAbstractClassCheck:
2411 case TypeCheckKind::kClassHierarchyCheck:
2412 case TypeCheckKind::kArrayObjectCheck:
2413 call_kind = throws_into_catch
2414 ? LocationSummary::kCallOnSlowPath
2415 : LocationSummary::kNoCall;
2416 break;
2417 case TypeCheckKind::kInterfaceCheck:
2418 call_kind = LocationSummary::kCall;
2419 break;
2420 case TypeCheckKind::kArrayCheck:
2421 call_kind = LocationSummary::kCallOnSlowPath;
2422 break;
2423 }
2424
2425 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2426 instruction, call_kind);
2427 if (call_kind != LocationSummary::kCall) {
2428 locations->SetInAt(0, Location::RequiresRegister());
2429 locations->SetInAt(1, Location::RequiresRegister());
2430 // Note that TypeCheckSlowPathARM64 uses this register too.
2431 locations->AddTemp(Location::RequiresRegister());
2432 } else {
2433 InvokeRuntimeCallingConvention calling_convention;
2434 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(0)));
2435 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2436 }
2437}
2438
void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  // Emits the cast check for `(Cls) obj`: falls through on success, and
  // either branches to a slow path or calls the runtime (which throws
  // ClassCastException) on failure.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Register temp;
  // Runtime-call strategies clobber `obj` instead of using a temp.
  if (!locations->WillCall()) {
    temp = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
  }

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCodeARM64* slow_path = nullptr;

  if (!locations->WillCall()) {
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
        instruction, !locations->CanCall());
    codegen_->AddSlowPath(slow_path);
  }

  vixl::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // Load the object's class into `obj` (runtime call) or `temp` (inline).
  if (locations->WillCall()) {
    __ Ldr(obj, HeapOperand(obj, class_offset));
    GetAssembler()->MaybeUnpoisonHeapReference(obj);
  } else {
    __ Ldr(temp, HeapOperand(obj, class_offset));
    GetAssembler()->MaybeUnpoisonHeapReference(temp);
  }

  switch (instruction->GetTypeCheckKind()) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, slow_path->GetEntryLabel());
      break;
    }
    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop;
      __ Bind(&loop);
      __ Ldr(temp, HeapOperand(temp, super_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(temp);
      // Jump to the slow path to throw the exception.
      __ Cbz(temp, slow_path->GetEntryLabel());
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }
    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);
      __ Ldr(temp, HeapOperand(temp, super_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(temp);
      __ Cbnz(temp, &loop);
      // Jump to the slow path to throw the exception.
      __ B(slow_path->GetEntryLabel());
      break;
    }
    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, &done);
      // Otherwise, we need to check that the object's class is a non primitive array.
      __ Ldr(temp, HeapOperand(temp, component_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(temp);
      __ Cbz(temp, slow_path->GetEntryLabel());
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp, slow_path->GetEntryLabel());
      break;
    }
    case TypeCheckKind::kInterfaceCheck:
    default:
      // Defer to the runtime check, which throws on failure.
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                              instruction,
                              instruction->GetDexPc(),
                              nullptr);
      break;
  }
  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
2536
void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  // An int constant produces no code; expose it as a constant location.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
2541
void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
2546
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002547void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
2548 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2549 locations->SetOut(Location::ConstantLocation(constant));
2550}
2551
2552void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
2553 // Will be generated at use site.
2554 UNUSED(constant);
2555}
2556
// Locations for an invoke whose target could not be resolved at compile time;
// the call is routed through a runtime trampoline (see the code generator below).
void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2563
void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Unresolved invokes are dispatched through a shared runtime call helper.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2567
// Common location setup for all invoke kinds: assigns argument and return
// locations according to the ARM64 dex calling convention.
void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2572
void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Interface invokes use the plain invoke convention; the IMT dispatch is
  // emitted by the matching code generator visitor.
  HandleInvoke(invoke);
}
2576
// Emits an interface dispatch: load the receiver's class, index into the
// class' embedded interface method table (IMT), and branch to the resolved
// ArtMethod's quick code. The dex method index is passed in ip1 as a hidden
// argument for the conflict trampoline.
void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
      invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  Location receiver = invoke->GetLocations()->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  // The class load above faults on a null receiver; record the PC so the
  // runtime can turn that fault into a NullPointerException.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2612
2613void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002614 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2615 if (intrinsic.TryDispatch(invoke)) {
2616 return;
2617 }
2618
Alexandre Rames67555f72014-11-18 10:55:16 +00002619 HandleInvoke(invoke);
2620}
2621
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002622void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002623 // When we do not run baseline, explicit clinit checks triggered by static
2624 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2625 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002626
Andreas Gampe878d58c2015-01-15 23:24:00 -08002627 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2628 if (intrinsic.TryDispatch(invoke)) {
2629 return;
2630 }
2631
Alexandre Rames67555f72014-11-18 10:55:16 +00002632 HandleInvoke(invoke);
2633}
2634
Andreas Gampe878d58c2015-01-15 23:24:00 -08002635static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
2636 if (invoke->GetLocations()->Intrinsified()) {
2637 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
2638 intrinsic.Dispatch(invoke);
2639 return true;
2640 }
2641 return false;
2642}
2643
// Emits the call sequence for a static or direct invoke, in three steps:
// (1) optionally preload the direct code pointer into LR, (2) materialize the
// callee ArtMethod* (or its address) per the method load kind, and (3) emit
// the branch per the code pointer location. Link-time-resolved parts record
// patch info consumed later by EmitLinkerPatches().
void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp).X(), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already in a register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp).X(), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp).X(),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      pc_rel_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                             invoke->GetDexCacheArrayOffset());
      vixl::Label* pc_insn_label = &pc_rel_dex_cache_patches_.back().label;
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        // Offset 0 is a placeholder; the linker rewrites it via the patch above.
        __ adrp(XRegisterFrom(temp).X(), 0);
      }
      __ Bind(pc_insn_label);  // Bind after ADRP.
      pc_rel_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      // Add LDR with its PC-relative DexCache access patch.
      pc_rel_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                             invoke->GetDexCacheArrayOffset());
      __ Ldr(XRegisterFrom(temp).X(), MemOperand(XRegisterFrom(temp).X(), 0));
      __ Bind(&pc_rel_dex_cache_patches_.back().label);  // Bind after LDR.
      // Both patches share the ADRP as their PC anchor.
      pc_rel_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        // Intrinsified invokes have no current-method input; reload it from
        // its well-known stack slot into the temp.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // temp = current_method->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
      // temp = temp[index_in_cache];
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch straight back to this method's frame entry.
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::Label* label = &relative_call_patches_.back().label;
      __ Bl(label);  // Arbitrarily branch to the instruction after BL, override at link time.
      __ Bind(label);  // Bind after BL.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method).X(),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}
2754
// Emits a vtable dispatch: load the receiver's class, read the target
// ArtMethod* from the class' embedded vtable, then branch to its quick code.
void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // Block literal pool emission while this dispatch sequence is generated.
  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // temp = receiver->klass_; this load also serves as the implicit null check
  // whose PC is recorded just below.
  __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}
2777
// Converts all patch information recorded during code generation into
// LinkerPatch entries. Labels were bound just after the instruction they
// describe (see the "Bind after ..." sites), so `label.location() - 4u`
// addresses the 4-byte instruction itself.
void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_rel_dex_cache_patches_.size();
  linker_patches->reserve(size);
  // Literals that must be patched with the target method's address.
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  // Literals that must be patched with the target method's code address.
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  // BL instructions whose relative target is resolved at link time.
  for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location() - 4u,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  // ADRP/LDR pairs for PC-relative dex cache array accesses; pc_insn_label
  // anchors the ADRP the LDR patch is relative to.
  for (const PcRelativeDexCacheAccessInfo& info : pc_rel_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location() - 4u,
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->location() - 4u,
                                                              info.element_offset));
  }
}
2812
2813vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
2814 // Look up the literal for value.
2815 auto lb = uint64_literals_.lower_bound(value);
2816 if (lb != uint64_literals_.end() && !uint64_literals_.key_comp()(value, lb->first)) {
2817 return lb->second;
2818 }
2819 // We don't have a literal for this value, insert a new one.
2820 vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(value);
2821 uint64_literals_.PutBefore(lb, value, literal);
2822 return literal;
2823}
2824
2825vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
2826 MethodReference target_method,
2827 MethodToLiteralMap* map) {
2828 // Look up the literal for target_method.
2829 auto lb = map->lower_bound(target_method);
2830 if (lb != map->end() && !map->key_comp()(target_method, lb->first)) {
2831 return lb->second;
2832 }
2833 // We don't have a literal for this method yet, insert a new one.
2834 vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(0u);
2835 map->PutBefore(lb, target_method, literal);
2836 return literal;
2837}
2838
// Literal that will hold the target method's address (tracked in
// method_patches_ for link-time patching).
vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}
2843
// Literal that will hold the target method's code address (tracked in
// call_patches_ for link-time patching).
vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}
2848
2849
Andreas Gampe878d58c2015-01-15 23:24:00 -08002850void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002851 // When we do not run baseline, explicit clinit checks triggered by static
2852 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2853 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002854
Andreas Gampe878d58c2015-01-15 23:24:00 -08002855 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2856 return;
2857 }
2858
Alexandre Ramesd921d642015-04-16 15:07:16 +01002859 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002860 LocationSummary* locations = invoke->GetLocations();
2861 codegen_->GenerateStaticOrDirectCall(
2862 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002863 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002864}
2865
2866void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002867 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2868 return;
2869 }
2870
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002871 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002872 DCHECK(!codegen_->IsLeafMethod());
2873 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2874}
2875
Alexandre Rames67555f72014-11-18 10:55:16 +00002876void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2877 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2878 : LocationSummary::kNoCall;
2879 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01002880 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002881 locations->SetOut(Location::RequiresRegister());
2882}
2883
// Loads a class reference, either directly from the current method (for the
// referrer's own class) or through the dex cache, falling back to a slow path
// for resolution and/or class initialization.
void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
  Register out = OutputRegister(cls);
  Register current_method = InputRegisterAt(cls, 0);
  if (cls->IsReferrersClass()) {
    // The requested class is the one declaring the current method, so it is
    // reachable directly and needs neither resolution nor a clinit check.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    __ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current_method->dex_cache_resolved_types_[type_index].
    MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
    __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
    __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
    // TODO: We will need a read barrier here.

    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: resolve on the slow path.
    __ Cbz(out, slow_path->GetEntryLabel());
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2909
David Brazdilcb1c0552015-08-04 16:22:25 +01002910static MemOperand GetExceptionTlsAddress() {
2911 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2912}
2913
Alexandre Rames67555f72014-11-18 10:55:16 +00002914void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2915 LocationSummary* locations =
2916 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2917 locations->SetOut(Location::RequiresRegister());
2918}
2919
void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  // Read the pending exception from the current thread's TLS slot.
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}
2923
void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  // No inputs or outputs; only a location summary is required.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
2927
void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the pending exception by storing null (wzr) into the TLS slot.
  __ Str(wzr, GetExceptionTlsAddress());
}
2931
void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  // Local loads carry no location summary; they are resolved by the code generator.
  load->SetLocations(nullptr);
}
2935
2936void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2937 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002938 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002939}
2940
Alexandre Rames67555f72014-11-18 10:55:16 +00002941void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2942 LocationSummary* locations =
2943 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01002944 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002945 locations->SetOut(Location::RequiresRegister());
2946}
2947
// Loads a string reference from the declaring class' dex cache; a null entry
// means the string is unresolved and is handled on the slow path.
void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
  codegen_->AddSlowPath(slow_path);

  Register out = OutputRegister(load);
  Register current_method = InputRegisterAt(load, 0);
  // out = current_method->declaring_class_->dex_cache_strings_[string_index].
  __ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
  __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
  __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  // TODO: We will need a read barrier here.
  __ Cbz(out, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
2961
void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  // Locals carry no location summary.
  local->SetLocations(nullptr);
}
2965
void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  // Locals only appear in the entry block; no code is emitted for them.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
2969
2970void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2971 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2972 locations->SetOut(Location::ConstantLocation(constant));
2973}
2974
2975void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2976 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002977 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002978}
2979
Alexandre Rames67555f72014-11-18 10:55:16 +00002980void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2981 LocationSummary* locations =
2982 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2983 InvokeRuntimeCallingConvention calling_convention;
2984 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2985}
2986
2987void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2988 codegen_->InvokeRuntime(instruction->IsEnter()
2989 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2990 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002991 instruction->GetDexPc(),
2992 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002993 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002994}
2995
Alexandre Rames42d641b2014-10-27 14:00:51 +00002996void LocationsBuilderARM64::VisitMul(HMul* mul) {
2997 LocationSummary* locations =
2998 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2999 switch (mul->GetResultType()) {
3000 case Primitive::kPrimInt:
3001 case Primitive::kPrimLong:
3002 locations->SetInAt(0, Location::RequiresRegister());
3003 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003004 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00003005 break;
3006
3007 case Primitive::kPrimFloat:
3008 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003009 locations->SetInAt(0, Location::RequiresFpuRegister());
3010 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00003011 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00003012 break;
3013
3014 default:
3015 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3016 }
3017}
3018
3019void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
3020 switch (mul->GetResultType()) {
3021 case Primitive::kPrimInt:
3022 case Primitive::kPrimLong:
3023 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
3024 break;
3025
3026 case Primitive::kPrimFloat:
3027 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003028 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00003029 break;
3030
3031 default:
3032 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3033 }
3034}
3035
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003036void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
3037 LocationSummary* locations =
3038 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3039 switch (neg->GetResultType()) {
3040 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00003041 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003042 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00003043 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003044 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003045
3046 case Primitive::kPrimFloat:
3047 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00003048 locations->SetInAt(0, Location::RequiresFpuRegister());
3049 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003050 break;
3051
3052 default:
3053 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3054 }
3055}
3056
3057void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
3058 switch (neg->GetResultType()) {
3059 case Primitive::kPrimInt:
3060 case Primitive::kPrimLong:
3061 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
3062 break;
3063
3064 case Primitive::kPrimFloat:
3065 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00003066 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003067 break;
3068
3069 default:
3070 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3071 }
3072}
3073
3074void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
3075 LocationSummary* locations =
3076 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3077 InvokeRuntimeCallingConvention calling_convention;
3078 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003079 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003080 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003081 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003082 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
Mathieu Chartiere401d142015-04-22 13:56:20 -07003083 void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003084}
3085
3086void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
3087 LocationSummary* locations = instruction->GetLocations();
3088 InvokeRuntimeCallingConvention calling_convention;
3089 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
3090 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003091 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003092 // Note: if heap poisoning is enabled, the entry point takes cares
3093 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003094 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3095 instruction,
3096 instruction->GetDexPc(),
3097 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003098 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003099}
3100
Alexandre Rames5319def2014-10-23 10:03:10 +01003101void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
3102 LocationSummary* locations =
3103 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3104 InvokeRuntimeCallingConvention calling_convention;
3105 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003106 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Rames5319def2014-10-23 10:03:10 +01003107 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Mathieu Chartiere401d142015-04-22 13:56:20 -07003108 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01003109}
3110
3111void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
3112 LocationSummary* locations = instruction->GetLocations();
3113 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
3114 DCHECK(type_index.Is(w0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003115 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003116 // Note: if heap poisoning is enabled, the entry point takes cares
3117 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003118 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3119 instruction,
3120 instruction->GetDexPc(),
3121 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003122 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01003123}
3124
3125void LocationsBuilderARM64::VisitNot(HNot* instruction) {
3126 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00003127 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003128 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003129}
3130
3131void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003132 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003133 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01003134 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01003135 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003136 break;
3137
3138 default:
3139 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
3140 }
3141}
3142
David Brazdil66d126e2015-04-03 16:02:44 +01003143void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
3144 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3145 locations->SetInAt(0, Location::RequiresRegister());
3146 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3147}
3148
3149void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
David Brazdil66d126e2015-04-03 16:02:44 +01003150 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
3151}
3152
Alexandre Rames5319def2014-10-23 10:03:10 +01003153void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003154 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3155 ? LocationSummary::kCallOnSlowPath
3156 : LocationSummary::kNoCall;
3157 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01003158 locations->SetInAt(0, Location::RequiresRegister());
3159 if (instruction->HasUses()) {
3160 locations->SetOut(Location::SameAsFirstInput());
3161 }
3162}
3163
void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a subsequent user instruction will fault on the same reference, let
  // that instruction carry the check instead of emitting one here.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  // Keep literal pools from being emitted inside this sequence so the
  // recorded PC maps exactly to the faulting load.
  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  // Load from [obj, #0] into the zero register: the load faults iff obj is
  // null; the loaded value itself is discarded.
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3174
3175void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003176 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
3177 codegen_->AddSlowPath(slow_path);
3178
3179 LocationSummary* locations = instruction->GetLocations();
3180 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00003181
3182 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01003183}
3184
Calin Juravlecd6dffe2015-01-08 17:35:35 +00003185void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003186 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00003187 GenerateImplicitNullCheck(instruction);
3188 } else {
3189 GenerateExplicitNullCheck(instruction);
3190 }
3191}
3192
void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic two-operand location handling.
  HandleBinaryOp(instruction);
}
3196
void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic two-operand code generation.
  HandleBinaryOp(instruction);
}
3200
void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are inserted by the register allocator after the locations
  // pass has run, so the locations builder must never see one.
  LOG(FATAL) << "Unreachable";
}
3204
void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  // The move resolver orders the moves so no source is clobbered before use.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3208
Alexandre Rames5319def2014-10-23 10:03:10 +01003209void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
3210 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3211 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3212 if (location.IsStackSlot()) {
3213 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3214 } else if (location.IsDoubleStackSlot()) {
3215 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3216 }
3217 locations->SetOut(location);
3218}
3219
void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
3224
3225void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
3226 LocationSummary* locations =
3227 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003228 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003229}
3230
void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
3235
3236void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
3237 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3238 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
3239 locations->SetInAt(i, Location::Any());
3240 }
3241 locations->SetOut(Location::Any());
3242}
3243
3244void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07003245 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003246 LOG(FATAL) << "Unreachable";
3247}
3248
void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  // Floating-point remainder is computed by a runtime call (fmod/fmodf),
  // so those cases need a full call; integral remainder is inlined.
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      // The divisor may be encoded as an immediate when it is a constant.
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Arguments and result must follow the runtime FP calling convention.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3277
3278void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
3279 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00003280
Serban Constantinescu02164b32014-11-13 14:05:07 +00003281 switch (type) {
3282 case Primitive::kPrimInt:
3283 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08003284 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003285 break;
3286 }
3287
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00003288 case Primitive::kPrimFloat:
3289 case Primitive::kPrimDouble: {
3290 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
3291 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00003292 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00003293 break;
3294 }
3295
Serban Constantinescu02164b32014-11-13 14:05:07 +00003296 default:
3297 LOG(FATAL) << "Unexpected rem type " << type;
3298 }
3299}
3300
void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A barrier has no operands and produces no value.
  memory_barrier->SetLocations(nullptr);
}
3304
void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence matching the requested barrier kind.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
3308
Alexandre Rames5319def2014-10-23 10:03:10 +01003309void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
3310 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3311 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003312 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01003313}
3314
void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
3319
void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  // No operands and no result.
  instruction->SetLocations(nullptr);
}
3323
void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  // Nothing to return; just tear down the frame.
  codegen_->GenerateFrameExit();
}
3328
void LocationsBuilderARM64::VisitShl(HShl* shl) {
  // Left shift shares the generic shift location handling.
  HandleShift(shl);
}
3332
void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  // Left shift shares the generic shift code generation.
  HandleShift(shl);
}
3336
void LocationsBuilderARM64::VisitShr(HShr* shr) {
  // Arithmetic right shift shares the generic shift location handling.
  HandleShift(shr);
}
3340
void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  // Arithmetic right shift shares the generic shift code generation.
  HandleShift(shr);
}
3344
void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    // 32-bit (and reference) values occupy a single stack slot.
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    // 64-bit values need a double (two-word) stack slot.
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}
3368
void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the input constraint pins the value to the local's
  // stack slot, so the store happens as part of resolving locations.
  UNUSED(store);
}
3372
void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic two-operand location handling.
  HandleBinaryOp(instruction);
}
3376
void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic two-operand code generation.
  HandleBinaryOp(instruction);
}
3380
void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static and instance field reads share the same location handling.
  HandleFieldGet(instruction);
}
3384
void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static and instance field reads share the same code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3388
void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static and instance field writes share the same location handling.
  HandleFieldSet(instruction);
}
3392
Alexandre Rames67555f72014-11-18 10:55:16 +00003393void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003394 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003395}
3396
void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // A suspend check only calls the runtime on its slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3400
3401void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003402 HBasicBlock* block = instruction->GetBlock();
3403 if (block->GetLoopInformation() != nullptr) {
3404 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3405 // The back edge will generate the suspend check.
3406 return;
3407 }
3408 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3409 // The goto will generate the suspend check.
3410 return;
3411 }
3412 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01003413}
3414
void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  // Temporaries have no operands and no result of their own.
  temp->SetLocations(nullptr);
}
3418
void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3423
Alexandre Rames67555f72014-11-18 10:55:16 +00003424void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
3425 LocationSummary* locations =
3426 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3427 InvokeRuntimeCallingConvention calling_convention;
3428 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
3429}
3430
void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  // Delegate exception delivery to the runtime.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
3436
3437void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
3438 LocationSummary* locations =
3439 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
3440 Primitive::Type input_type = conversion->GetInputType();
3441 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00003442 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00003443 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
3444 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
3445 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
3446 }
3447
Alexandre Rames542361f2015-01-29 16:57:31 +00003448 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003449 locations->SetInAt(0, Location::RequiresFpuRegister());
3450 } else {
3451 locations->SetInAt(0, Location::RequiresRegister());
3452 }
3453
Alexandre Rames542361f2015-01-29 16:57:31 +00003454 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003455 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3456 } else {
3457 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3458 }
3459}
3460
void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    // Integral <-> integral: pick a zero-extend (Ubfx), sign-extend (Sbfx)
    // or plain move depending on source/destination widths and signedness.
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      // Widening to char: char is unsigned, so zero-extend the input.
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32bit input value as a 64bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      // Unsigned narrowing or widening from char: zero-extend the smaller
      // width. The source register view must match the output's width.
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      // All remaining integral conversions are signed: sign-extend.
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    // Integral -> FP: signed convert.
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    // FP -> integral: only int/long targets are valid; convert toward zero.
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // FP <-> FP: precision change (float <-> double).
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
Alexandre Rames67555f72014-11-18 10:55:16 +00003503
void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  // Logical (unsigned) right shift shares the generic shift handling.
  HandleShift(ushr);
}
3507
void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  // Logical (unsigned) right shift shares the generic shift handling.
  HandleShift(ushr);
}
3511
void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  // Bitwise XOR shares the generic two-operand location handling.
  HandleBinaryOp(instruction);
}
3515
void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  // Bitwise XOR shares the generic two-operand code generation.
  HandleBinaryOp(instruction);
}
3519
void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3525
void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3531
Nicolas Geoffray2e7cd752015-07-10 11:38:52 +01003532void LocationsBuilderARM64::VisitFakeString(HFakeString* instruction) {
3533 DCHECK(codegen_->IsBaseline());
3534 LocationSummary* locations =
3535 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3536 locations->SetOut(Location::ConstantLocation(GetGraph()->GetNullConstant()));
3537}
3538
void InstructionCodeGeneratorARM64::VisitFakeString(HFakeString* instruction ATTRIBUTE_UNUSED) {
  DCHECK(codegen_->IsBaseline());
  // Will be generated at use site.
}
3543
Mark Mendellfe57faa2015-09-18 09:26:15 -04003544// Simple implementation of packed switch - generate cascaded compare/jumps.
3545void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
3546 LocationSummary* locations =
3547 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
3548 locations->SetInAt(0, Location::RequiresRegister());
3549}
3550
3551void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
3552 int32_t lower_bound = switch_instr->GetStartValue();
3553 int32_t num_entries = switch_instr->GetNumEntries();
3554 Register value_reg = InputRegisterAt(switch_instr, 0);
3555 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
3556
3557 // Create a series of compare/jumps.
3558 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
3559 for (int32_t i = 0; i < num_entries; i++) {
3560 int32_t case_value = lower_bound + i;
3561 vixl::Label* succ = codegen_->GetLabelOf(successors.at(i));
3562 if (case_value == 0) {
3563 __ Cbz(value_reg, succ);
3564 } else {
3565 __ Cmp(value_reg, vixl::Operand(case_value));
3566 __ B(eq, succ);
3567 }
3568 }
3569
3570 // And the default for any other value.
3571 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
3572 __ B(codegen_->GetLabelOf(default_block));
3573 }
3574}
3575
Alexandre Rames67555f72014-11-18 10:55:16 +00003576#undef __
3577#undef QUICK_ENTRY_POINT
3578
Alexandre Rames5319def2014-10-23 10:03:10 +01003579} // namespace arm64
3580} // namespace art