/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data when num_entries is small.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

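// Map an HIR integer condition to the corresponding ARM64 condition code. Note that kCondB,
// kCondBE, kCondA and kCondAE are the unsigned comparisons, mapped to lo/ls/hi/hs.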
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory operands used to save/restore live registers, and perform the save or
// restore.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating-point register spills).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

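// Spill the live caller-save registers at the slow path entry, recording each register's stack
// offset and, for core registers holding objects, updating the stack mask so the GC can locate
// those references.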
void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

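// Slow path throwing ArrayIndexOutOfBoundsException for a failed HBoundsCheck.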
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

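// Slow path throwing ArithmeticException when a division by zero is detected.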
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

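// Slow path calling the runtime to resolve a class (and run its static initializer when
// `do_clinit_` is set), then moving the result to the desired location.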
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

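// Slow path calling the runtime to resolve a string for HLoadString.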
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

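// Slow path throwing NullPointerException for a failed HNullCheck.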
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

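// Slow path calling the pTestSuspend runtime entrypoint for an HSuspendCheck, then branching
// back to the instruction or to the successor block.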
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

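// Slow path for HInstanceOf and HCheckCast, calling the runtime when the type check cannot be
// decided on the fast path.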
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

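// Slow path calling the pDeoptimize runtime entrypoint for HDeoptimize.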
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

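// Slow path calling the pAputObject runtime entrypoint for an HArraySet that may require a
// runtime type check.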
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

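// Assign the next argument location under the managed (dex) calling convention: floating-point
// arguments go in FP registers and core arguments in core registers while any remain; further
// arguments go on the stack. Stack space is reserved for all arguments either way.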
Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so 1 GPR and 1 FPR VIXL temp are always available to resolve
  // the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

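// Frame entry: probe the stack when an overflow check is required (the load from
// sp - reserved_bytes faults into the implicit stack-overflow handler), then allocate the
// frame, storing the current ArtMethod* at sp[0] and spilling the preserved registers.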
void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

Alexandre Rames5319def2014-10-23 10:03:10 +01001069void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1070 __ Bind(GetLabelOf(block));
1071}
1072
Calin Juravle175dc732015-08-25 15:42:32 +01001073void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1074 DCHECK(location.IsRegister());
1075 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1076}
1077
Calin Juravlee460d1d2015-09-29 04:52:17 +01001078void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1079 if (location.IsRegister()) {
1080 locations->AddTemp(location);
1081 } else {
1082 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1083 }
1084}
1085
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001086void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001087 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001088 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001089 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +01001090 vixl::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001091 if (value_can_be_null) {
1092 __ Cbz(value, &done);
1093 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001094 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
1095 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001096 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001097 if (value_can_be_null) {
1098 __ Bind(&done);
1099 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001100}
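
// This is the usual card table trick: the biased card table base doubles as
// the "dirty" value, so marking needs no extra constant. Roughly (a sketch):
//   ldr x16, [tr, #card_table_offset]   // Thread-local card table base.
//   lsr w17, <object>, #kCardShift      // Card index covering the object.
//   strb w16, [x16, x17]                // card[index] = low byte of the base.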

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsZeroBitPattern()) {
        temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
      } else {
        if (src_cst->IsIntConstant()) {
          temp = temps.AcquireW();
        } else if (src_cst->IsLongConstant()) {
          temp = temps.AcquireX();
        } else if (src_cst->IsFloatConstant()) {
          temp = temps.AcquireS();
        } else {
          DCHECK(src_cst->IsDoubleConstant());
          temp = temps.AcquireD();
        }
        MoveConstant(temp, src_cst);
      }
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}
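
// Illustrative examples of the inference above (a sketch, not literal output):
// moving a 32-bit stack slot into a core register with an unspecified type
// gives "ldr w0, [sp, #offset]"; a 64-bit core-to-FP move gives
// "fmov d0, x1"; and storing a zero constant reuses wzr/xzr, e.g.
// "str xzr, [sp, #offset]" for a long 0, avoiding a scratch register.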

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
1329
Calin Juravle77520bc2015-01-12 18:45:46 +00001330void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001331 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001332 const MemOperand& src,
1333 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001334 MacroAssembler* masm = GetVIXLAssembler();
1335 BlockPoolsScope block_pools(masm);
1336 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001337 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001338 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001339
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001340 DCHECK(!src.IsPreIndex());
1341 DCHECK(!src.IsPostIndex());
1342
1343 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001344 __ Add(temp_base, src.base(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001345 MemOperand base = MemOperand(temp_base);
1346 switch (type) {
1347 case Primitive::kPrimBoolean:
1348 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001349 if (needs_null_check) {
1350 MaybeRecordImplicitNullCheck(instruction);
1351 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001352 break;
1353 case Primitive::kPrimByte:
1354 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001355 if (needs_null_check) {
1356 MaybeRecordImplicitNullCheck(instruction);
1357 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001358 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1359 break;
1360 case Primitive::kPrimChar:
1361 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001362 if (needs_null_check) {
1363 MaybeRecordImplicitNullCheck(instruction);
1364 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001365 break;
1366 case Primitive::kPrimShort:
1367 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001368 if (needs_null_check) {
1369 MaybeRecordImplicitNullCheck(instruction);
1370 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001371 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1372 break;
1373 case Primitive::kPrimInt:
1374 case Primitive::kPrimNot:
1375 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001376 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001377 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001378 if (needs_null_check) {
1379 MaybeRecordImplicitNullCheck(instruction);
1380 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001381 break;
1382 case Primitive::kPrimFloat:
1383 case Primitive::kPrimDouble: {
1384 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001385 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001386
1387 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1388 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001389 if (needs_null_check) {
1390 MaybeRecordImplicitNullCheck(instruction);
1391 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001392 __ Fmov(FPRegister(dst), temp);
1393 break;
1394 }
1395 case Primitive::kPrimVoid:
1396 LOG(FATAL) << "Unreachable type " << type;
1397 }
1398}
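
// Sketch of the sequence emitted for a volatile double load at #offset
// (register names arbitrary):
//   add x16, x0, #offset   // ldar has no offset form, so form the address.
//   ldar x17, [x16]        // 64-bit acquire load into a core scratch.
//   fmov d0, x17           // No FP ldar exists; move the bits across.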

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
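
// Mirror image of LoadAcquire: a volatile float store becomes, roughly
// (a sketch, register names arbitrary):
//   add x16, x0, #offset   // stlr likewise has no offset form.
//   fmov w17, s0           // No FP stlr; move the bits to a core scratch.
//   stlr w17, [x16]        // 32-bit release store.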

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}
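
// Every runtime call thus reduces to an indirect call through the thread
// register (sketch):
//   ldr lr, [tr, #entry_point_offset]   // Load the entrypoint off the Thread.
//   blr lr                              // Call it; RecordPcInfo maps the
//                                       // return pc for stack walking.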

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
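
// Sketch of the emitted check; the acquire load orders the status read before
// any subsequent access to the class's static fields:
//   add w16, <class_reg>, #status_offset
//   ldar w16, [x16]
//   cmp w16, #kStatusInitialized
//   b.lt <slow path>   // Any status below "initialized" takes the slow path.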

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}
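
// In AArch64 terms the mapping above is: kAnyAny/kAnyStore -> "dmb ish",
// kLoadAny -> "dmb ishld" and kStoreStore -> "dmb ishst". Inner-shareable
// scope suffices since all threads of a process run within the same
// inner-shareable domain.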

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
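
// The fast path is only two instructions (sketch):
//   ldrh w16, [tr, #thread_flags_offset]   // 16-bit load of the thread flags.
//   cbnz w16, <slow path>                  // Any pending flag calls the runtime.
// On a back edge (successor != nullptr) the polarity flips: cbz branches to
// the loop header and the fall-through enters the slow path.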

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in a register of the same size as the
          // result. If we are rotating a long and the shift distance arrives in
          // a W register, no sxtw is needed to use it as an X register: the
          // distance is always masked with reg_bits - 1 anyway.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.immediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
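
// The masking above matches the Java shift semantics: only the low 5 bits of
// the distance are used for an int (kMaxIntShiftDistance == 31) and the low
// 6 bits for a long (kMaxLongShiftDistance == 63). So an int "x << 33"
// compiles to "lsl w0, w1, #1".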

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There are no immediate variants of the negated bitwise instructions on AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
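
// Each merged pattern folds into a single instruction (sketch):
//   bic x0, x1, x2   // x0 = x1 & ~x2
//   orn x0, x1, x2   // x0 = x1 | ~x2
//   eon x0, x1, x2   // x0 = x1 ^ ~x2
// which is what makes merging a separate HNot into this instruction worthwhile.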

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it were passed one, by
  // generating the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
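
// For example, "a + (b << 5)" can fold into the single instruction
//   add x0, x1, x2, lsl #5
// and "longVal + (long) intVal" into
//   add x0, x1, w2, sxtw
// rather than a separate shift or extension followed by the arithmetic op.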
1964
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001965void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001966 // The read barrier instrumentation does not support the
1967 // HArm64IntermediateAddress instruction yet.
1968 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001969 LocationSummary* locations =
1970 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1971 locations->SetInAt(0, Location::RequiresRegister());
1972 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1973 locations->SetOut(Location::RequiresRegister());
1974}
1975
1976void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
1977 HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001978 // The read barrier instrumentation does not support the
1979 // HArm64IntermediateAddress instruction yet.
1980 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001981 __ Add(OutputRegister(instruction),
1982 InputRegisterAt(instruction, 0),
1983 Operand(InputOperandAt(instruction, 1)));
1984}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
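
// The three shapes emitted above (sketch):
//   madd x0, x1, x2, x3   // x0 = x3 + x1 * x2
//   msub x0, x1, x2, x3   // x0 = x3 - x1 * x2
//   mneg x0, x1, x2       // x0 = -(x1 * x2): msub with a zero accumulator,
//                         // which is why no accumulator register is allocated.
// The erratum 835769 workaround separates a 64-bit load/store from an
// immediately following multiply-accumulate with a nop, since that sequence
// can produce a wrong result on affected Cortex-A53 cores.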
2039
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002040void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002041 bool object_array_get_with_read_barrier =
2042 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002043 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002044 new (GetGraph()->GetArena()) LocationSummary(instruction,
2045 object_array_get_with_read_barrier ?
2046 LocationSummary::kCallOnSlowPath :
2047 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002048 locations->SetInAt(0, Location::RequiresRegister());
2049 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002050 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2051 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2052 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002053 // The output overlaps in the case of an object array get with
2054 // read barriers enabled: we do not want the move to overwrite the
2055 // array's location, as we need it to emit the read barrier.
2056 locations->SetOut(
2057 Location::RequiresRegister(),
2058 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002059 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002060}
2061
2062void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002063 Primitive::Type type = instruction->GetType();
2064 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002065 LocationSummary* locations = instruction->GetLocations();
2066 Location index = locations->InAt(1);
2067 uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Roland Levillain44015862016-01-22 11:47:17 +00002068 Location out = locations->Out();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002069
Alexandre Ramesd921d642015-04-16 15:07:16 +01002070 MacroAssembler* masm = GetVIXLAssembler();
2071 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002072 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002073 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002074
Roland Levillain44015862016-01-22 11:47:17 +00002075 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2076 // Object ArrayGet with Baker's read barrier case.
2077 Register temp = temps.AcquireW();
2078 // The read barrier instrumentation does not support the
2079 // HArm64IntermediateAddress instruction yet.
2080 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2081 // Note that a potential implicit null check is handled in the
2082 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2083 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2084 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002085 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002086 // General case.
2087 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002088 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002089 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2090 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002091 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002092 Register temp = temps.AcquireSameSizeAs(obj);
2093 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2094 // The read barrier instrumentation does not support the
2095 // HArm64IntermediateAddress instruction yet.
2096 DCHECK(!kEmitCompilerReadBarrier);
2097 // We do not need to compute the intermediate address from the array: the
2098 // input instruction has done it already. See the comment in
2099 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2100 if (kIsDebugBuild) {
2101 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2102 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2103 }
2104 temp = obj;
2105 } else {
2106 __ Add(temp, obj, offset);
2107 }
2108 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2109 }
2110
2111 codegen_->Load(type, OutputCPURegister(instruction), source);
2112 codegen_->MaybeRecordImplicitNullCheck(instruction);
2113
2114 if (type == Primitive::kPrimNot) {
2115 static_assert(
2116 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2117 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2118 Location obj_loc = locations->InAt(0);
2119 if (index.IsConstant()) {
2120 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2121 } else {
2122 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2123 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002124 }
Roland Levillain4d027112015-07-01 15:41:14 +01002125 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002126}
2127
Alexandre Rames5319def2014-10-23 10:03:10 +01002128void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2129 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2130 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002131 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002132}
2133
2134void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Marko51c103e2016-04-28 13:10:02 +01002135 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002136 BlockPoolsScope block_pools(GetVIXLAssembler());
Vladimir Marko51c103e2016-04-28 13:10:02 +01002137 __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002138 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002139}
2140
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002141void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002142 Primitive::Type value_type = instruction->GetComponentType();
2143
2144 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002145 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2146 instruction,
Vladimir Markoefafb122016-08-25 15:20:47 +01002147 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002148 LocationSummary::kCallOnSlowPath :
2149 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002150 locations->SetInAt(0, Location::RequiresRegister());
2151 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002152 if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002153 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002154 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002155 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002156 }
2157}
2158
2159void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2160 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002161 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002162 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002163 bool needs_write_barrier =
2164 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002165
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002166 Register array = InputRegisterAt(instruction, 0);
2167 CPURegister value = InputCPURegisterAt(instruction, 2);
2168 CPURegister source = value;
2169 Location index = locations->InAt(1);
2170 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2171 MemOperand destination = HeapOperand(array);
2172 MacroAssembler* masm = GetVIXLAssembler();
2173 BlockPoolsScope block_pools(masm);
2174
2175 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002176 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002177 if (index.IsConstant()) {
2178 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2179 destination = HeapOperand(array, offset);
2180 } else {
2181 UseScratchRegisterScope temps(masm);
2182 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002183 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00002184 // The read barrier instrumentation does not support the
2185 // HArm64IntermediateAddress instruction yet.
2186 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002187 // We do not need to compute the intermediate address from the array: the
2188 // input instruction has done it already. See the comment in
2189 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2190 if (kIsDebugBuild) {
2191 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
 2192 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2193 }
2194 temp = array;
2195 } else {
2196 __ Add(temp, array, offset);
2197 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002198 destination = HeapOperand(temp,
2199 XRegisterFrom(index),
2200 LSL,
2201 Primitive::ComponentSizeShift(value_type));
2202 }
2203 codegen_->Store(value_type, value, destination);
2204 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002205 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002206 DCHECK(needs_write_barrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002207 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002208 vixl::Label done;
2209 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002210 {
2211 // We use a block to end the scratch scope before the write barrier, thus
2212 // freeing the temporary registers so they can be used in `MarkGCCard`.
2213 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002214 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002215 if (index.IsConstant()) {
2216 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002217 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002218 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002219 destination = HeapOperand(temp,
2220 XRegisterFrom(index),
2221 LSL,
2222 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002223 }
2224
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002225 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2226 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2227 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2228
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002229 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002230 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2231 codegen_->AddSlowPath(slow_path);
2232 if (instruction->GetValueCanBeNull()) {
2233 vixl::Label non_zero;
2234 __ Cbnz(Register(value), &non_zero);
2235 if (!index.IsConstant()) {
2236 __ Add(temp, array, offset);
2237 }
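          // A null reference trivially passes the type check, so store zero
          // (wzr) directly and skip the check entirely.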
2238 __ Str(wzr, destination);
2239 codegen_->MaybeRecordImplicitNullCheck(instruction);
2240 __ B(&done);
2241 __ Bind(&non_zero);
2242 }
2243
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002244 if (kEmitCompilerReadBarrier) {
2245 // When read barriers are enabled, the type checking
2246 // instrumentation requires two read barriers:
2247 //
2248 // __ Mov(temp2, temp);
2249 // // /* HeapReference<Class> */ temp = temp->component_type_
2250 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002251 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002252 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2253 //
2254 // // /* HeapReference<Class> */ temp2 = value->klass_
2255 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002256 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002257 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2258 //
2259 // __ Cmp(temp, temp2);
2260 //
2261 // However, the second read barrier may trash `temp`, as it
2262 // is a temporary register, and as such would not be saved
2263 // along with live registers before calling the runtime (nor
2264 // restored afterwards). So in this case, we bail out and
2265 // delegate the work to the array set slow path.
2266 //
2267 // TODO: Extend the register allocator to support a new
2268 // "(locally) live temp" location so as to avoid always
2269 // going into the slow path when read barriers are enabled.
2270 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002271 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002272 Register temp2 = temps.AcquireSameSizeAs(array);
2273 // /* HeapReference<Class> */ temp = array->klass_
2274 __ Ldr(temp, HeapOperand(array, class_offset));
2275 codegen_->MaybeRecordImplicitNullCheck(instruction);
2276 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2277
2278 // /* HeapReference<Class> */ temp = temp->component_type_
2279 __ Ldr(temp, HeapOperand(temp, component_offset));
2280 // /* HeapReference<Class> */ temp2 = value->klass_
2281 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2282 // If heap poisoning is enabled, no need to unpoison `temp`
2283 // nor `temp2`, as we are comparing two poisoned references.
2284 __ Cmp(temp, temp2);
2285
2286 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2287 vixl::Label do_put;
2288 __ B(eq, &do_put);
2289 // If heap poisoning is enabled, the `temp` reference has
2290 // not been unpoisoned yet; unpoison it now.
2291 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2292
2293 // /* HeapReference<Class> */ temp = temp->super_class_
2294 __ Ldr(temp, HeapOperand(temp, super_offset));
2295 // If heap poisoning is enabled, no need to unpoison
2296 // `temp`, as we are comparing against null below.
2297 __ Cbnz(temp, slow_path->GetEntryLabel());
2298 __ Bind(&do_put);
2299 } else {
2300 __ B(ne, slow_path->GetEntryLabel());
2301 }
2302 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002303 }
2304 }
2305
2306 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002307 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002308 DCHECK(value.IsW());
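      // Poison a scratch copy rather than `value` itself: the original
      // register is still needed unpoisoned, e.g. for the card-mark null
      // test below.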
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002309 __ Mov(temp2, value.W());
2310 GetAssembler()->PoisonHeapReference(temp2);
2311 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002312 }
2313
2314 if (!index.IsConstant()) {
2315 __ Add(temp, array, offset);
2316 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002317 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002318
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002319 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002320 codegen_->MaybeRecordImplicitNullCheck(instruction);
2321 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002322 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002323
2324 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2325
2326 if (done.IsLinked()) {
2327 __ Bind(&done);
2328 }
2329
2330 if (slow_path != nullptr) {
2331 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002332 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002333 }
2334}
2335
Alexandre Rames67555f72014-11-18 10:55:16 +00002336void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002337 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2338 ? LocationSummary::kCallOnSlowPath
2339 : LocationSummary::kNoCall;
2340 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002341 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002342 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002343 if (instruction->HasUses()) {
2344 locations->SetOut(Location::SameAsFirstInput());
2345 }
2346}
2347
2348void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002349 BoundsCheckSlowPathARM64* slow_path =
2350 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002351 codegen_->AddSlowPath(slow_path);
2352
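  // A single unsigned comparison covers both failure modes: a negative index
  // reinterpreted as unsigned exceeds any valid array length, so the `hs`
  // (unsigned >=) branch below reaches the slow path for negative and
  // too-large indices alike.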
2353 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2354 __ B(slow_path->GetEntryLabel(), hs);
2355}
2356
Alexandre Rames67555f72014-11-18 10:55:16 +00002357void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2358 LocationSummary* locations =
2359 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2360 locations->SetInAt(0, Location::RequiresRegister());
2361 if (check->HasUses()) {
2362 locations->SetOut(Location::SameAsFirstInput());
2363 }
2364}
2365
2366void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2367 // We assume the class is not null.
2368 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2369 check->GetLoadClass(), check, check->GetDexPc(), true);
2370 codegen_->AddSlowPath(slow_path);
2371 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2372}
2373
Roland Levillain1a653882016-03-18 18:05:57 +00002374static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2375 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2376 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2377}
2378
2379void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2380 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2381 Location rhs_loc = instruction->GetLocations()->InAt(1);
2382 if (rhs_loc.IsConstant()) {
2383 // 0.0 is the only immediate that can be encoded directly in
2384 // an FCMP instruction.
2385 //
2386 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2387 // specify that in a floating-point comparison, positive zero
2388 // and negative zero are considered equal, so we can use the
2389 // literal 0.0 for both cases here.
2390 //
 2391 // Note however that some methods (Float.equals, Float.compare,
 2392 // Float.compareTo, Double.equals, Double.compare,
2393 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2394 // StrictMath.min) consider 0.0 to be (strictly) greater than
2395 // -0.0. So if we ever translate calls to these methods into a
2396 // HCompare instruction, we must handle the -0.0 case with
2397 // care here.
2398 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2399 __ Fcmp(lhs_reg, 0.0);
2400 } else {
2401 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2402 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002403}
2404
Serban Constantinescu02164b32014-11-13 14:05:07 +00002405void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002406 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002407 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2408 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002409 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002410 case Primitive::kPrimBoolean:
2411 case Primitive::kPrimByte:
2412 case Primitive::kPrimShort:
2413 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002414 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002415 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002416 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002417 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002418 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2419 break;
2420 }
2421 case Primitive::kPrimFloat:
2422 case Primitive::kPrimDouble: {
2423 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002424 locations->SetInAt(1,
2425 IsFloatingPointZeroConstant(compare->InputAt(1))
2426 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2427 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002428 locations->SetOut(Location::RequiresRegister());
2429 break;
2430 }
2431 default:
2432 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2433 }
2434}
2435
2436void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2437 Primitive::Type in_type = compare->InputAt(0)->GetType();
2438
2439 // 0 if: left == right
2440 // 1 if: left > right
2441 // -1 if: left < right
2442 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002443 case Primitive::kPrimBoolean:
2444 case Primitive::kPrimByte:
2445 case Primitive::kPrimShort:
2446 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002447 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002448 case Primitive::kPrimLong: {
2449 Register result = OutputRegister(compare);
2450 Register left = InputRegisterAt(compare, 0);
2451 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002452 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002453 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2454 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
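      // For example, with left == 3 and right == 7 the Cmp sets LT: Cset
      // writes 1 (the operands differ) and Cneg then flips it to -1. Equal
      // operands leave result == 0, since NE is false.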
Serban Constantinescu02164b32014-11-13 14:05:07 +00002455 break;
2456 }
2457 case Primitive::kPrimFloat:
2458 case Primitive::kPrimDouble: {
2459 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002460 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002461 __ Cset(result, ne);
2462 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002463 break;
2464 }
2465 default:
2466 LOG(FATAL) << "Unimplemented compare type " << in_type;
2467 }
2468}
2469
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002470void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002471 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002472
2473 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2474 locations->SetInAt(0, Location::RequiresFpuRegister());
2475 locations->SetInAt(1,
2476 IsFloatingPointZeroConstant(instruction->InputAt(1))
2477 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2478 : Location::RequiresFpuRegister());
2479 } else {
2480 // Integer cases.
2481 locations->SetInAt(0, Location::RequiresRegister());
2482 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2483 }
2484
David Brazdilb3e773e2016-01-26 11:28:37 +00002485 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002486 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002487 }
2488}
2489
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002490void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002491 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002492 return;
2493 }
2494
2495 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002496 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002497 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002498
Roland Levillain7f63c522015-07-13 15:54:55 +00002499 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002500 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002501 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002502 } else {
2503 // Integer cases.
2504 Register lhs = InputRegisterAt(instruction, 0);
2505 Operand rhs = InputOperandAt(instruction, 1);
2506 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002507 __ Cset(res, ARM64Condition(if_cond));
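    // For example, a materialized `a < b` becomes Cmp a, b followed by
    // Cset res, lt: res is 1 when the condition holds and 0 otherwise.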
Roland Levillain7f63c522015-07-13 15:54:55 +00002508 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002509}
2510
2511#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2512 M(Equal) \
2513 M(NotEqual) \
2514 M(LessThan) \
2515 M(LessThanOrEqual) \
2516 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002517 M(GreaterThanOrEqual) \
2518 M(Below) \
2519 M(BelowOrEqual) \
2520 M(Above) \
2521 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002522#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002523void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2524void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002525FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002526#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002527#undef FOR_EACH_CONDITION_INSTRUCTION
2528
Zheng Xuc6667102015-05-15 16:08:45 +08002529void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2530 DCHECK(instruction->IsDiv() || instruction->IsRem());
2531
2532 LocationSummary* locations = instruction->GetLocations();
2533 Location second = locations->InAt(1);
2534 DCHECK(second.IsConstant());
2535
2536 Register out = OutputRegister(instruction);
2537 Register dividend = InputRegisterAt(instruction, 0);
2538 int64_t imm = Int64FromConstant(second.GetConstant());
2539 DCHECK(imm == 1 || imm == -1);
2540
2541 if (instruction->IsRem()) {
2542 __ Mov(out, 0);
2543 } else {
2544 if (imm == 1) {
2545 __ Mov(out, dividend);
2546 } else {
2547 __ Neg(out, dividend);
2548 }
2549 }
2550}
2551
2552void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2553 DCHECK(instruction->IsDiv() || instruction->IsRem());
2554
2555 LocationSummary* locations = instruction->GetLocations();
2556 Location second = locations->InAt(1);
2557 DCHECK(second.IsConstant());
2558
2559 Register out = OutputRegister(instruction);
2560 Register dividend = InputRegisterAt(instruction, 0);
2561 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002562 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002563 int ctz_imm = CTZ(abs_imm);
2564
2565 UseScratchRegisterScope temps(GetVIXLAssembler());
2566 Register temp = temps.AcquireSameSizeAs(out);
2567
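  // Rounding fix-up for truncating (Java) division: Asr alone rounds toward
  // negative infinity, so negative dividends are first biased by
  // (abs_imm - 1). Worked example with imm == 8: dividend == -5 gives
  // temp == 2, the Csel picks temp because the dividend is negative, and
  // Asr by ctz_imm == 3 yields 0, matching Java's -5 / 8 == 0. The Rem path
  // applies the same bias before masking and removes it afterwards, e.g.
  // -5 % 8 == -5.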
2568 if (instruction->IsDiv()) {
2569 __ Add(temp, dividend, abs_imm - 1);
2570 __ Cmp(dividend, 0);
2571 __ Csel(out, temp, dividend, lt);
2572 if (imm > 0) {
2573 __ Asr(out, out, ctz_imm);
2574 } else {
2575 __ Neg(out, Operand(out, ASR, ctz_imm));
2576 }
2577 } else {
2578 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2579 __ Asr(temp, dividend, bits - 1);
2580 __ Lsr(temp, temp, bits - ctz_imm);
2581 __ Add(out, dividend, temp);
2582 __ And(out, out, abs_imm - 1);
2583 __ Sub(out, out, temp);
2584 }
2585}
2586
2587void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2588 DCHECK(instruction->IsDiv() || instruction->IsRem());
2589
2590 LocationSummary* locations = instruction->GetLocations();
2591 Location second = locations->InAt(1);
2592 DCHECK(second.IsConstant());
2593
2594 Register out = OutputRegister(instruction);
2595 Register dividend = InputRegisterAt(instruction, 0);
2596 int64_t imm = Int64FromConstant(second.GetConstant());
2597
2598 Primitive::Type type = instruction->GetResultType();
2599 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2600
2601 int64_t magic;
2602 int shift;
2603 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
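  // Classic signed magic-number division (Hacker's Delight, section 10-4):
  // the quotient is derived from the high half of dividend * magic plus a
  // correction and shifts. As a sketch (constants taken from the standard
  // 32-bit derivation, not from this call): dividing by 7 uses
  // magic == 0x92492493 with shift == 2, and since that magic is negative
  // while imm > 0, the dividend is added back before shifting.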
2604
2605 UseScratchRegisterScope temps(GetVIXLAssembler());
2606 Register temp = temps.AcquireSameSizeAs(out);
2607
2608 // temp = get_high(dividend * magic)
2609 __ Mov(temp, magic);
2610 if (type == Primitive::kPrimLong) {
2611 __ Smulh(temp, dividend, temp);
2612 } else {
2613 __ Smull(temp.X(), dividend, temp);
2614 __ Lsr(temp.X(), temp.X(), 32);
2615 }
2616
2617 if (imm > 0 && magic < 0) {
2618 __ Add(temp, temp, dividend);
2619 } else if (imm < 0 && magic > 0) {
2620 __ Sub(temp, temp, dividend);
2621 }
2622
2623 if (shift != 0) {
2624 __ Asr(temp, temp, shift);
2625 }
2626
2627 if (instruction->IsDiv()) {
2628 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2629 } else {
2630 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2631 // TODO: Strength reduction for msub.
2632 Register temp_imm = temps.AcquireSameSizeAs(out);
2633 __ Mov(temp_imm, imm);
2634 __ Msub(out, temp, temp_imm, dividend);
2635 }
2636}
2637
2638void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2639 DCHECK(instruction->IsDiv() || instruction->IsRem());
2640 Primitive::Type type = instruction->GetResultType();
 2641 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2642
2643 LocationSummary* locations = instruction->GetLocations();
2644 Register out = OutputRegister(instruction);
2645 Location second = locations->InAt(1);
2646
2647 if (second.IsConstant()) {
2648 int64_t imm = Int64FromConstant(second.GetConstant());
2649
2650 if (imm == 0) {
 2651 // Do not generate anything. DivZeroCheck would have prevented any code from being executed.
2652 } else if (imm == 1 || imm == -1) {
2653 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002654 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002655 DivRemByPowerOfTwo(instruction);
2656 } else {
2657 DCHECK(imm <= -2 || imm >= 2);
2658 GenerateDivRemWithAnyConstant(instruction);
2659 }
2660 } else {
2661 Register dividend = InputRegisterAt(instruction, 0);
2662 Register divisor = InputRegisterAt(instruction, 1);
2663 if (instruction->IsDiv()) {
2664 __ Sdiv(out, dividend, divisor);
2665 } else {
2666 UseScratchRegisterScope temps(GetVIXLAssembler());
2667 Register temp = temps.AcquireSameSizeAs(out);
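      // ARM64 has no integer remainder instruction: compute the quotient
      // with Sdiv, then out = dividend - temp * divisor via Msub.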
2668 __ Sdiv(temp, dividend, divisor);
2669 __ Msub(out, temp, divisor, dividend);
2670 }
2671 }
2672}
2673
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002674void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2675 LocationSummary* locations =
2676 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2677 switch (div->GetResultType()) {
2678 case Primitive::kPrimInt:
2679 case Primitive::kPrimLong:
2680 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002681 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002682 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2683 break;
2684
2685 case Primitive::kPrimFloat:
2686 case Primitive::kPrimDouble:
2687 locations->SetInAt(0, Location::RequiresFpuRegister());
2688 locations->SetInAt(1, Location::RequiresFpuRegister());
2689 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2690 break;
2691
2692 default:
2693 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2694 }
2695}
2696
2697void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2698 Primitive::Type type = div->GetResultType();
2699 switch (type) {
2700 case Primitive::kPrimInt:
2701 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002702 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002703 break;
2704
2705 case Primitive::kPrimFloat:
2706 case Primitive::kPrimDouble:
2707 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2708 break;
2709
2710 default:
2711 LOG(FATAL) << "Unexpected div type " << type;
2712 }
2713}
2714
Alexandre Rames67555f72014-11-18 10:55:16 +00002715void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002716 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2717 ? LocationSummary::kCallOnSlowPath
2718 : LocationSummary::kNoCall;
2719 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002720 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2721 if (instruction->HasUses()) {
2722 locations->SetOut(Location::SameAsFirstInput());
2723 }
2724}
2725
2726void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2727 SlowPathCodeARM64* slow_path =
2728 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2729 codegen_->AddSlowPath(slow_path);
2730 Location value = instruction->GetLocations()->InAt(0);
2731
Alexandre Rames3e69f162014-12-10 10:36:50 +00002732 Primitive::Type type = instruction->GetType();
2733
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002734 if (!Primitive::IsIntegralType(type)) {
2735 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002736 return;
2737 }
2738
Alexandre Rames67555f72014-11-18 10:55:16 +00002739 if (value.IsConstant()) {
2740 int64_t divisor = Int64ConstantFrom(value);
2741 if (divisor == 0) {
2742 __ B(slow_path->GetEntryLabel());
2743 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002744 // A division by a non-zero constant is valid. We don't need to perform
2745 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002746 }
2747 } else {
2748 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2749 }
2750}
2751
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002752void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2753 LocationSummary* locations =
2754 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2755 locations->SetOut(Location::ConstantLocation(constant));
2756}
2757
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002758void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2759 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002760 // Will be generated at use site.
2761}
2762
Alexandre Rames5319def2014-10-23 10:03:10 +01002763void LocationsBuilderARM64::VisitExit(HExit* exit) {
2764 exit->SetLocations(nullptr);
2765}
2766
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002767void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002768}
2769
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002770void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2771 LocationSummary* locations =
2772 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2773 locations->SetOut(Location::ConstantLocation(constant));
2774}
2775
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002776void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002777 // Will be generated at use site.
2778}
2779
David Brazdilfc6a86a2015-06-26 10:33:45 +00002780void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002781 DCHECK(!successor->IsExitBlock());
2782 HBasicBlock* block = got->GetBlock();
2783 HInstruction* previous = got->GetPrevious();
2784 HLoopInformation* info = block->GetLoopInformation();
2785
David Brazdil46e2a392015-03-16 17:31:52 +00002786 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002787 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2788 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2789 return;
2790 }
2791 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2792 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2793 }
2794 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002795 __ B(codegen_->GetLabelOf(successor));
2796 }
2797}
2798
David Brazdilfc6a86a2015-06-26 10:33:45 +00002799void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2800 got->SetLocations(nullptr);
2801}
2802
2803void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2804 HandleGoto(got, got->GetSuccessor());
2805}
2806
2807void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2808 try_boundary->SetLocations(nullptr);
2809}
2810
2811void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2812 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2813 if (!successor->IsExitBlock()) {
2814 HandleGoto(try_boundary, successor);
2815 }
2816}
2817
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002818void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002819 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002820 vixl::Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00002821 vixl::Label* false_target) {
2822 // FP branching requires both targets to be explicit. If either of the targets
 2823 // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
2824 vixl::Label fallthrough_target;
2825 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002826
David Brazdil0debae72015-11-12 18:37:00 +00002827 if (true_target == nullptr && false_target == nullptr) {
2828 // Nothing to do. The code always falls through.
2829 return;
2830 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002831 // Constant condition, statically compared against "true" (integer value 1).
2832 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002833 if (true_target != nullptr) {
2834 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002835 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002836 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002837 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002838 if (false_target != nullptr) {
2839 __ B(false_target);
2840 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002841 }
David Brazdil0debae72015-11-12 18:37:00 +00002842 return;
2843 }
2844
2845 // The following code generates these patterns:
2846 // (1) true_target == nullptr && false_target != nullptr
2847 // - opposite condition true => branch to false_target
2848 // (2) true_target != nullptr && false_target == nullptr
2849 // - condition true => branch to true_target
2850 // (3) true_target != nullptr && false_target != nullptr
2851 // - condition true => branch to true_target
2852 // - branch to false_target
2853 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002854 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002855 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002856 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002857 if (true_target == nullptr) {
2858 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2859 } else {
2860 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2861 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002862 } else {
2863 // The condition instruction has not been materialized, use its inputs as
2864 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002865 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002866
David Brazdil0debae72015-11-12 18:37:00 +00002867 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002868 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002869 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002870 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002871 IfCondition opposite_condition = condition->GetOppositeCondition();
2872 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002873 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002874 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002875 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002876 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002877 // Integer cases.
2878 Register lhs = InputRegisterAt(condition, 0);
2879 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002880
2881 Condition arm64_cond;
2882 vixl::Label* non_fallthrough_target;
2883 if (true_target == nullptr) {
2884 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2885 non_fallthrough_target = false_target;
2886 } else {
2887 arm64_cond = ARM64Condition(condition->GetCondition());
2888 non_fallthrough_target = true_target;
2889 }
2890
Aart Bik086d27e2016-01-20 17:02:00 -08002891 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
2892 rhs.IsImmediate() && (rhs.immediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002893 switch (arm64_cond) {
2894 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002895 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002896 break;
2897 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002898 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002899 break;
2900 case lt:
2901 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002902 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002903 break;
2904 case ge:
2905 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002906 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002907 break;
2908 default:
2909 // Without the `static_cast` the compiler throws an error for
2910 // `-Werror=sign-promo`.
2911 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2912 }
2913 } else {
2914 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002915 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002916 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002917 }
2918 }
David Brazdil0debae72015-11-12 18:37:00 +00002919
2920 // If neither branch falls through (case 3), the conditional branch to `true_target`
2921 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2922 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002923 __ B(false_target);
2924 }
David Brazdil0debae72015-11-12 18:37:00 +00002925
2926 if (fallthrough_target.IsLinked()) {
2927 __ Bind(&fallthrough_target);
2928 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002929}
2930
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002931void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2932 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002933 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002934 locations->SetInAt(0, Location::RequiresRegister());
2935 }
2936}
2937
2938void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002939 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2940 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2941 vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2942 nullptr : codegen_->GetLabelOf(true_successor);
2943 vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2944 nullptr : codegen_->GetLabelOf(false_successor);
2945 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002946}
2947
2948void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2949 LocationSummary* locations = new (GetGraph()->GetArena())
2950 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002951 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002952 locations->SetInAt(0, Location::RequiresRegister());
2953 }
2954}
2955
2956void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002957 SlowPathCodeARM64* slow_path =
2958 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002959 GenerateTestAndBranch(deoptimize,
2960 /* condition_input_index */ 0,
2961 slow_path->GetEntryLabel(),
2962 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002963}
2964
David Brazdilc0b601b2016-02-08 14:20:45 +00002965enum SelectVariant {
2966 kCsel,
2967 kCselFalseConst,
2968 kCselTrueConst,
2969 kFcsel,
2970};
2971
2972static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2973 return condition->IsCondition() &&
2974 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2975}
2976
2977static inline bool IsRecognizedCselConstant(HInstruction* constant) {
2978 if (constant->IsConstant()) {
2979 int64_t value = Int64FromConstant(constant->AsConstant());
2980 if ((value == -1) || (value == 0) || (value == 1)) {
2981 return true;
2982 }
2983 }
2984 return false;
2985}
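
// Note on IsRecognizedCselConstant above: only -1, 0 and +1 qualify because
// for these the select needs no extra register to materialize the constant;
// presumably (per VIXL's Csel macro synthesis) they map onto Csinv, Csel and
// Csinc against the zero register, respectively.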
2986
2987static inline SelectVariant GetSelectVariant(HSelect* select) {
2988 if (Primitive::IsFloatingPointType(select->GetType())) {
2989 return kFcsel;
2990 } else if (IsRecognizedCselConstant(select->GetFalseValue())) {
2991 return kCselFalseConst;
2992 } else if (IsRecognizedCselConstant(select->GetTrueValue())) {
2993 return kCselTrueConst;
2994 } else {
2995 return kCsel;
2996 }
2997}
2998
2999static inline bool HasSwappedInputs(SelectVariant variant) {
3000 return variant == kCselTrueConst;
3001}
3002
3003static inline Condition GetConditionForSelect(HCondition* condition, SelectVariant variant) {
3004 IfCondition cond = HasSwappedInputs(variant) ? condition->GetOppositeCondition()
3005 : condition->GetCondition();
3006 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3007 : ARM64Condition(cond);
3008}
3009
David Brazdil74eb1b22015-12-14 11:44:01 +00003010void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3011 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
David Brazdilc0b601b2016-02-08 14:20:45 +00003012 switch (GetSelectVariant(select)) {
3013 case kCsel:
3014 locations->SetInAt(0, Location::RequiresRegister());
3015 locations->SetInAt(1, Location::RequiresRegister());
3016 locations->SetOut(Location::RequiresRegister());
3017 break;
3018 case kCselFalseConst:
3019 locations->SetInAt(0, Location::ConstantLocation(select->InputAt(0)->AsConstant()));
3020 locations->SetInAt(1, Location::RequiresRegister());
3021 locations->SetOut(Location::RequiresRegister());
3022 break;
3023 case kCselTrueConst:
3024 locations->SetInAt(0, Location::RequiresRegister());
3025 locations->SetInAt(1, Location::ConstantLocation(select->InputAt(1)->AsConstant()));
3026 locations->SetOut(Location::RequiresRegister());
3027 break;
3028 case kFcsel:
3029 locations->SetInAt(0, Location::RequiresFpuRegister());
3030 locations->SetInAt(1, Location::RequiresFpuRegister());
3031 locations->SetOut(Location::RequiresFpuRegister());
3032 break;
David Brazdil74eb1b22015-12-14 11:44:01 +00003033 }
3034 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3035 locations->SetInAt(2, Location::RequiresRegister());
3036 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003037}
3038
3039void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003040 HInstruction* cond = select->GetCondition();
3041 SelectVariant variant = GetSelectVariant(select);
3042 Condition csel_cond;
3043
3044 if (IsBooleanValueOrMaterializedCondition(cond)) {
3045 if (cond->IsCondition() && cond->GetNext() == select) {
3046 // Condition codes set from previous instruction.
3047 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3048 } else {
3049 __ Cmp(InputRegisterAt(select, 2), 0);
3050 csel_cond = HasSwappedInputs(variant) ? eq : ne;
3051 }
3052 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003053 GenerateFcmp(cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003054 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3055 } else {
3056 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
3057 csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
3058 }
3059
3060 switch (variant) {
3061 case kCsel:
3062 case kCselFalseConst:
3063 __ Csel(OutputRegister(select),
3064 InputRegisterAt(select, 1),
3065 InputOperandAt(select, 0),
3066 csel_cond);
3067 break;
3068 case kCselTrueConst:
3069 __ Csel(OutputRegister(select),
3070 InputRegisterAt(select, 0),
3071 InputOperandAt(select, 1),
3072 csel_cond);
3073 break;
3074 case kFcsel:
3075 __ Fcsel(OutputFPRegister(select),
3076 InputFPRegisterAt(select, 1),
3077 InputFPRegisterAt(select, 0),
3078 csel_cond);
3079 break;
3080 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003081}
3082
David Srbecky0cf44932015-12-09 14:09:59 +00003083void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3084 new (GetGraph()->GetArena()) LocationSummary(info);
3085}
3086
David Srbeckyd28f4a02016-03-14 17:14:24 +00003087void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3088 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003089}
3090
3091void CodeGeneratorARM64::GenerateNop() {
3092 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003093}
3094
Alexandre Rames5319def2014-10-23 10:03:10 +01003095void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003096 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003097}
3098
3099void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003100 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003101}
3102
3103void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003104 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003105}
3106
3107void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003108 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003109}
3110
Roland Levillain44015862016-01-22 11:47:17 +00003111static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3112 return kEmitCompilerReadBarrier &&
3113 (kUseBakerReadBarrier ||
3114 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3115 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3116 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3117}
3118
Alexandre Rames67555f72014-11-18 10:55:16 +00003119void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003120 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003121 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3122 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003123 case TypeCheckKind::kExactCheck:
3124 case TypeCheckKind::kAbstractClassCheck:
3125 case TypeCheckKind::kClassHierarchyCheck:
3126 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003127 call_kind =
3128 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003129 break;
3130 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003131 case TypeCheckKind::kUnresolvedCheck:
3132 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003133 call_kind = LocationSummary::kCallOnSlowPath;
3134 break;
3135 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003136
Alexandre Rames67555f72014-11-18 10:55:16 +00003137 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003138 locations->SetInAt(0, Location::RequiresRegister());
3139 locations->SetInAt(1, Location::RequiresRegister());
3140 // The "out" register is used as a temporary, so it overlaps with the inputs.
3141 // Note that TypeCheckSlowPathARM64 uses this register too.
3142 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3143 // When read barriers are enabled, we need a temporary register for
3144 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003145 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003146 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003147 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003148}
3149
3150void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003151 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003152 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003153 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003154 Register obj = InputRegisterAt(instruction, 0);
3155 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003156 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003157 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003158 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3159 locations->GetTemp(0) :
3160 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003161 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3162 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3163 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3164 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003165
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003166 vixl::Label done, zero;
3167 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003168
3169 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003170 // Avoid null check if we know `obj` is not null.
3171 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003172 __ Cbz(obj, &zero);
3173 }
3174
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003175 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003176 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003177
Roland Levillain44015862016-01-22 11:47:17 +00003178 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003179 case TypeCheckKind::kExactCheck: {
3180 __ Cmp(out, cls);
3181 __ Cset(out, eq);
3182 if (zero.IsLinked()) {
3183 __ B(&done);
3184 }
3185 break;
3186 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003187
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003188 case TypeCheckKind::kAbstractClassCheck: {
3189 // If the class is abstract, we eagerly fetch the super class of the
3190 // object to avoid doing a comparison we know will fail.
3191 vixl::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
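
// For reference, the kExactCheck case above reduces to a short compare-and-set
// sequence, roughly (register choices are illustrative; the allocator picks
// the actual assignment, and read barriers can expand the class load):
//   ldr w0, [w1, #class_offset]  // w0 = obj->klass_
//   cmp w0, w2                   // compare against the checked class `cls`
//   cset w0, eq                  // out = (klass == cls) ? 1 : 0
// The remaining cases layer loops over super_class_ / component_type_ or a
// slow-path call on top of this basic shape.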

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // /* HeapReference<Class> */ temp = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ Cbnz(temp, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label check_non_primitive_component_type;
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ Cbnz(temp, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbz(temp, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}
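
// Note the structural difference from VisitInstanceOf above: a check-cast
// produces no value, so a successful check simply falls through to `done`,
// while a failure branches to TypeCheckSlowPathARM64, which calls into the
// runtime to throw (a ClassCastException in the usual case). For the simple
// check kinds, when the instruction cannot throw into a catch block, that
// slow path is marked fatal and does not return.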

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
         MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex() % ImTable::kSize, kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}
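
// The intrinsic hand-off happens in two stages: IntrinsicLocationsBuilderARM64
// ::TryDispatch() (called from the locations builders above) marks the
// invoke's LocationSummary as intrinsified when a hand-written implementation
// exists, and TryGenerateIntrinsicCode() then emits that implementation
// instead of the generic call sequence, which is why the code generators
// below bail out early when it returns true.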

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
      uint32_t element_offset = invoke->GetDexCacheArrayOffset();
      vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::Label* ldr_label =
          NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(ldr_label);
        __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
      }
      break;
    }
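    // The two labels bound above form an ADRP/LDR pair that the linker
    // rewrites once the dex cache array address is known: the ADRP
    // materializes the 4KiB-aligned page of the target and the LDR supplies
    // the offset within that page. Passing `adrp_label` into the second
    // NewPcRelativeDexCacheArrayPatch() call ties the LDR patch back to its
    // ADRP so both immediates can be computed consistently.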
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::Label* label = &relative_call_patches_.back().label;
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(label);
      __ bl(0);  // Branch and link to itself. This will be overridden at link time.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}
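
// Taken together, the virtual dispatch above is the classic three-load
// sequence (registers are illustrative):
//   ldr w16, [x0, #class_offset]    // temp = receiver->klass_
//   ldr x16, [x16, #method_offset]  // temp = embedded vtable entry (ArtMethod*)
//   ldr lr,  [x16, #entry_point]    // lr = entry_point_from_quick_compiled_code_
//   blr lr
// Interface dispatch (VisitInvokeInterface above) differs mainly in indexing
// the IMT and passing the method index in ip1 as a hidden argument.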

vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
                                                          uint32_t string_index,
                                                          vixl::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
}

vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                                 uint32_t element_offset,
                                                                 vixl::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
}

vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
                                                    uint32_t offset_or_index,
                                                    vixl::Label* adrp_label,
                                                    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}
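
// A minimal sketch of the expected calling pattern for these helpers, as used
// by the load-string and dex-cache code elsewhere in this file (names are
// illustrative):
//   vixl::Label* adrp_label = NewPcRelativeStringPatch(dex_file, string_index);
//   // ... bind adrp_label and emit an ADRP with a zero placeholder ...
//   vixl::Label* second_label =
//       NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
//   // ... bind second_label and emit the ADD/LDR with a zero placeholder ...
// Each call records one patch entry; the first entry's pc_insn_label points
// at itself, the second entry's points back at the ADRP, as set up above.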

vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
    const DexFile& dex_file, uint32_t string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
  Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
  return DeduplicateUint64Literal(address);
}

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      boot_image_string_patches_.size() +
      pc_relative_string_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->location(),
                                                              info.offset_or_index));
  }
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    vixl::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
                                                       target_string.dex_file,
                                                       target_string.string_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
                                                               &info.target_dex_file,
                                                               info.pc_insn_label->location(),
                                                               info.offset_or_index));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    vixl::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
  }
}

vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
                                                                      Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
    MethodReference target_method,
    MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}

void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  InvokeRuntimeCallingConvention calling_convention;
  CodeGenerator::CreateLoadClassLocationSummary(
      cls,
      LocationFrom(calling_convention.GetRegisterAt(0)),
      LocationFrom(vixl::x0),
      /* code_generator_supports_read_barrier */ true);
}

void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
                            cls,
                            cls->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = cls->GetLocations()->Out();
  Register out = OutputRegister(cls);
  Register current_method = InputRegisterAt(cls, 0);
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
    GenerateGcRootFieldLoad(
        cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
    // /* GcRoot<mirror::Class>[] */ out =
    //     current_method.ptr_sized_fields_->dex_cache_resolved_types_
    __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
    // /* GcRoot<mirror::Class> */ out = out[type_index]
    GenerateGcRootFieldLoad(
        cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
      DCHECK(cls->CanCallRuntime());
      SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
          cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
      codegen_->AddSlowPath(slow_path);
      if (!cls->IsInDexCache()) {
        __ Cbz(out, slow_path->GetEntryLabel());
      }
      if (cls->MustGenerateClinitCheck()) {
        GenerateClassInitializationCheck(slow_path, out);
      } else {
        __ Bind(slow_path->GetExitLabel());
      }
    }
  }
}

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  if (kEmitCompilerReadBarrier) {
    switch (desired_string_load_kind) {
      case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      case HLoadString::LoadKind::kBootImageAddress:
        // TODO: Implement for read barrier.
        return HLoadString::LoadKind::kDexCacheViaMethod;
      default:
        break;
    }
  }
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Location out_loc = load->GetLocations()->Out();
  Register out = OutputRegister(load);

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!kEmitCompilerReadBarrier);
      __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
                                                              load->GetStringIndex()));
      return;  // No dex cache slow path.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(!kEmitCompilerReadBarrier);
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t string_index = load->GetStringIndex();
      vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add ADD with its PC-relative String patch.
      vixl::Label* add_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(add_label);
        __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
      }
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kDexCacheAddress: {
      DCHECK_NE(load->GetAddress(), 0u);
      // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
      // that gives a 16KiB range. To try and reduce the number of literals if we load
      // multiple strings, simply split the dex cache address to a 16KiB aligned base
      // loaded from a literal and the remaining offset embedded in the load.
      static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
      DCHECK_ALIGNED(load->GetAddress(), 4u);
      constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
      uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
      uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
      __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
      GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
      break;
    }
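    // As a worked example (the address is made up): with offset_bits == 14,
    // a dex cache address of 0x12345678 splits into
    //   base_address = 0x12345678 & ~0x3fff = 0x12344000
    //   offset       = 0x12345678 &  0x3fff = 0x1678
    // so strings whose GC roots share the same 16KiB-aligned region can share
    // one 64-bit literal and differ only in the LDR immediate.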
    case HLoadString::LoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t element_offset = load->GetDexCacheElementOffset();
      vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::Label* ldr_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
      break;
    }
    case HLoadString::LoadKind::kDexCacheViaMethod: {
      Register current_method = InputRegisterAt(load, 0);
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(
          load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
      // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
      __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
      // /* GcRoot<mirror::String> */ out = out[string_index]
      GenerateGcRootFieldLoad(
          load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
      UNREACHABLE();
  }

  if (!load->IsInDexCache()) {
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(out, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
                              ? QUICK_ENTRY_POINT(pLockObject)
                              : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004265 // Note: if heap poisoning is enabled, the entry point takes care
4266 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004267 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4268 instruction,
4269 instruction->GetDexPc(),
4270 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004271 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004272}
4273
Alexandre Rames5319def2014-10-23 10:03:10 +01004274void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4275 LocationSummary* locations =
4276 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4277 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004278 if (instruction->IsStringAlloc()) {
4279 locations->AddTemp(LocationFrom(kArtMethodRegister));
4280 } else {
4281 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4282 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4283 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004284 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4285}
4286
4287void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004288 // Note: if heap poisoning is enabled, the entry point takes care
4289 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004290 if (instruction->IsStringAlloc()) {
4291 // Strings are allocated through StringFactory; call the NewEmptyString entry point.
4292 Location temp = instruction->GetLocations()->GetTemp(0);
4293 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
4294 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4295 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4296 __ Blr(lr);
4297 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4298 } else {
4299 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4300 instruction,
4301 instruction->GetDexPc(),
4302 nullptr);
4303 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4304 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004305}
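
// A rough sketch of the sequence emitted above for the StringFactory path
// (register names are illustrative):
//   ldr xTemp, [tr, #pNewEmptyString]  // ArtMethod* of the empty-string factory.
//   ldr lr, [xTemp, #code_offset]      // Its quick-compiled code entry point.
//   blr lr                             // The new string is returned in w0.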
4306
4307void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4308 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004309 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004310 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004311}
4312
4313void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004314 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004315 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004316 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004317 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004318 break;
4319
4320 default:
4321 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4322 }
4323}
4324
David Brazdil66d126e2015-04-03 16:02:44 +01004325void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4326 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4327 locations->SetInAt(0, Location::RequiresRegister());
4328 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4329}
4330
4331void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
David Brazdil66d126e2015-04-03 16:02:44 +01004332 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
4333}
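
// Note on the Eor above: booleans are materialized as 0 or 1, so XOR-ing with 1
// gives a branch-free logical negation.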
4334
Alexandre Rames5319def2014-10-23 10:03:10 +01004335void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004336 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4337 ? LocationSummary::kCallOnSlowPath
4338 : LocationSummary::kNoCall;
4339 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004340 locations->SetInAt(0, Location::RequiresRegister());
4341 if (instruction->HasUses()) {
4342 locations->SetOut(Location::SameAsFirstInput());
4343 }
4344}
4345
Calin Juravle2ae48182016-03-16 14:05:09 +00004346void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4347 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004348 return;
4349 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004350
Alexandre Ramesd921d642015-04-16 15:07:16 +01004351 BlockPoolsScope block_pools(GetVIXLAssembler());
4352 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004353 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004354 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004355}
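
// The wzr load above is the entire implicit null check: if `obj` is null the
// load faults, and the runtime's fault handler uses the PC info recorded here
// to throw the NullPointerException. No compare or branch is emitted.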
4356
Calin Juravle2ae48182016-03-16 14:05:09 +00004357void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004358 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004359 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004360
4361 LocationSummary* locations = instruction->GetLocations();
4362 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004363
4364 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004365}
4366
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004367void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004368 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004369}
4370
Alexandre Rames67555f72014-11-18 10:55:16 +00004371void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4372 HandleBinaryOp(instruction);
4373}
4374
4375void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4376 HandleBinaryOp(instruction);
4377}
4378
Alexandre Rames3e69f162014-12-10 10:36:50 +00004379void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4380 LOG(FATAL) << "Unreachable";
4381}
4382
4383void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4384 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4385}
4386
Alexandre Rames5319def2014-10-23 10:03:10 +01004387void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4388 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4389 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4390 if (location.IsStackSlot()) {
4391 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4392 } else if (location.IsDoubleStackSlot()) {
4393 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4394 }
4395 locations->SetOut(location);
4396}
4397
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004398void InstructionCodeGeneratorARM64::VisitParameterValue(
4399 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004400 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004401}
4402
4403void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4404 LocationSummary* locations =
4405 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004406 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004407}
4408
4409void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4410 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4411 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004412}
4413
4414void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4415 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko3925c6e2016-05-17 16:30:10 +01004416 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004417 locations->SetInAt(i, Location::Any());
4418 }
4419 locations->SetOut(Location::Any());
4420}
4421
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004422void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004423 LOG(FATAL) << "Unreachable";
4424}
4425
Serban Constantinescu02164b32014-11-13 14:05:07 +00004426void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004427 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004428 LocationSummary::CallKind call_kind =
4429 Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004430 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4431
4432 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004433 case Primitive::kPrimInt:
4434 case Primitive::kPrimLong:
4435 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004436 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004437 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4438 break;
4439
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004440 case Primitive::kPrimFloat:
4441 case Primitive::kPrimDouble: {
4442 InvokeRuntimeCallingConvention calling_convention;
4443 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4444 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4445 locations->SetOut(calling_convention.GetReturnLocation(type));
4446
4447 break;
4448 }
4449
Serban Constantinescu02164b32014-11-13 14:05:07 +00004450 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004451 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004452 }
4453}
4454
4455void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4456 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004457
Serban Constantinescu02164b32014-11-13 14:05:07 +00004458 switch (type) {
4459 case Primitive::kPrimInt:
4460 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004461 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004462 break;
4463 }
4464
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004465 case Primitive::kPrimFloat:
4466 case Primitive::kPrimDouble: {
4467 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4468 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004469 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004470 if (type == Primitive::kPrimFloat) {
4471 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4472 } else {
4473 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4474 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004475 break;
4476 }
4477
Serban Constantinescu02164b32014-11-13 14:05:07 +00004478 default:
4479 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004480 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004481 }
4482}
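
// For reference: integral remainders are computed with a divide and a
// multiply-subtract when the divisor is not a constant (a sketch, assuming the
// GenerateDivRemIntegral helper works this way; illustrative registers):
//   sdiv wTemp, wDividend, wDivisor
//   msub wOut, wTemp, wDivisor, wDividend  // out = dividend - quotient * divisor
// ARM64 has no floating-point remainder instruction, hence the fmodf/fmod
// runtime calls above.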
4483
Calin Juravle27df7582015-04-17 19:12:31 +01004484void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4485 memory_barrier->SetLocations(nullptr);
4486}
4487
4488void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004489 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004490}
4491
Alexandre Rames5319def2014-10-23 10:03:10 +01004492void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4493 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4494 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004495 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004496}
4497
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004498void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004499 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004500}
4501
4502void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4503 instruction->SetLocations(nullptr);
4504}
4505
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004506void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004507 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004508}
4509
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004510void LocationsBuilderARM64::VisitRor(HRor* ror) {
4511 HandleBinaryOp(ror);
4512}
4513
4514void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4515 HandleBinaryOp(ror);
4516}
4517
Serban Constantinescu02164b32014-11-13 14:05:07 +00004518void LocationsBuilderARM64::VisitShl(HShl* shl) {
4519 HandleShift(shl);
4520}
4521
4522void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4523 HandleShift(shl);
4524}
4525
4526void LocationsBuilderARM64::VisitShr(HShr* shr) {
4527 HandleShift(shr);
4528}
4529
4530void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4531 HandleShift(shr);
4532}
4533
Alexandre Rames5319def2014-10-23 10:03:10 +01004534void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004535 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004536}
4537
4538void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004539 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004540}
4541
Alexandre Rames67555f72014-11-18 10:55:16 +00004542void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004543 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004544}
4545
4546void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004547 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004548}
4549
4550void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004551 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004552}
4553
Alexandre Rames67555f72014-11-18 10:55:16 +00004554void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004555 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004556}
4557
Calin Juravlee460d1d2015-09-29 04:52:17 +01004558void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4559 HUnresolvedInstanceFieldGet* instruction) {
4560 FieldAccessCallingConventionARM64 calling_convention;
4561 codegen_->CreateUnresolvedFieldLocationSummary(
4562 instruction, instruction->GetFieldType(), calling_convention);
4563}
4564
4565void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4566 HUnresolvedInstanceFieldGet* instruction) {
4567 FieldAccessCallingConventionARM64 calling_convention;
4568 codegen_->GenerateUnresolvedFieldAccess(instruction,
4569 instruction->GetFieldType(),
4570 instruction->GetFieldIndex(),
4571 instruction->GetDexPc(),
4572 calling_convention);
4573}
4574
4575void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4576 HUnresolvedInstanceFieldSet* instruction) {
4577 FieldAccessCallingConventionARM64 calling_convention;
4578 codegen_->CreateUnresolvedFieldLocationSummary(
4579 instruction, instruction->GetFieldType(), calling_convention);
4580}
4581
4582void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4583 HUnresolvedInstanceFieldSet* instruction) {
4584 FieldAccessCallingConventionARM64 calling_convention;
4585 codegen_->GenerateUnresolvedFieldAccess(instruction,
4586 instruction->GetFieldType(),
4587 instruction->GetFieldIndex(),
4588 instruction->GetDexPc(),
4589 calling_convention);
4590}
4591
4592void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4593 HUnresolvedStaticFieldGet* instruction) {
4594 FieldAccessCallingConventionARM64 calling_convention;
4595 codegen_->CreateUnresolvedFieldLocationSummary(
4596 instruction, instruction->GetFieldType(), calling_convention);
4597}
4598
4599void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4600 HUnresolvedStaticFieldGet* instruction) {
4601 FieldAccessCallingConventionARM64 calling_convention;
4602 codegen_->GenerateUnresolvedFieldAccess(instruction,
4603 instruction->GetFieldType(),
4604 instruction->GetFieldIndex(),
4605 instruction->GetDexPc(),
4606 calling_convention);
4607}
4608
4609void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4610 HUnresolvedStaticFieldSet* instruction) {
4611 FieldAccessCallingConventionARM64 calling_convention;
4612 codegen_->CreateUnresolvedFieldLocationSummary(
4613 instruction, instruction->GetFieldType(), calling_convention);
4614}
4615
4616void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4617 HUnresolvedStaticFieldSet* instruction) {
4618 FieldAccessCallingConventionARM64 calling_convention;
4619 codegen_->GenerateUnresolvedFieldAccess(instruction,
4620 instruction->GetFieldType(),
4621 instruction->GetFieldIndex(),
4622 instruction->GetDexPc(),
4623 calling_convention);
4624}
4625
Alexandre Rames5319def2014-10-23 10:03:10 +01004626void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4627 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4628}
4629
4630void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004631 HBasicBlock* block = instruction->GetBlock();
4632 if (block->GetLoopInformation() != nullptr) {
4633 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4634 // The back edge will generate the suspend check.
4635 return;
4636 }
4637 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4638 // The goto will generate the suspend check.
4639 return;
4640 }
4641 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004642}
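
// A sketch of what GenerateSuspendCheck emits for a standalone check, assuming
// the helper defined earlier in this file (illustrative registers):
//   ldrh wTemp, [tr, #thread_flags_offset]  // Any set flag requests a suspend.
//   cbnz wTemp, suspend_slow_path           // Slow path calls pTestSuspend.
//   return_label: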
4643
Alexandre Rames67555f72014-11-18 10:55:16 +00004644void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4645 LocationSummary* locations =
4646 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4647 InvokeRuntimeCallingConvention calling_convention;
4648 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4649}
4650
4651void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
4652 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004653 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004654 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004655}
4656
4657void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4658 LocationSummary* locations =
4659 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4660 Primitive::Type input_type = conversion->GetInputType();
4661 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004662 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004663 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4664 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4665 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4666 }
4667
Alexandre Rames542361f2015-01-29 16:57:31 +00004668 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004669 locations->SetInAt(0, Location::RequiresFpuRegister());
4670 } else {
4671 locations->SetInAt(0, Location::RequiresRegister());
4672 }
4673
Alexandre Rames542361f2015-01-29 16:57:31 +00004674 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004675 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4676 } else {
4677 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4678 }
4679}
4680
4681void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4682 Primitive::Type result_type = conversion->GetResultType();
4683 Primitive::Type input_type = conversion->GetInputType();
4684
4685 DCHECK_NE(input_type, result_type);
4686
Alexandre Rames542361f2015-01-29 16:57:31 +00004687 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004688 int result_size = Primitive::ComponentSize(result_type);
4689 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004690 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004691 Register output = OutputRegister(conversion);
4692 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004693 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004694 // 'int' values are used directly as W registers, discarding the top
4695 // bits, so we don't need to sign-extend and can just perform a move.
4696 // We do not pass the `kDiscardForSameWReg` argument, which forces the move to
4697 // be emitted even for identical W registers and thus clears the top 32 bits
4698 // of the target register. We theoretically could leave those bits unchanged,
4699 // but we would have to make sure that no code uses a 32-bit input value as a
4700 // 64-bit value assuming that the top 32 bits are zero.
4701 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004702 } else if (result_type == Primitive::kPrimChar ||
4703 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4704 __ Ubfx(output,
4705 output.IsX() ? source.X() : source.W(),
4706 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004707 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004708 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004709 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004710 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004711 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004712 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004713 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4714 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004715 } else if (Primitive::IsFloatingPointType(result_type) &&
4716 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004717 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4718 } else {
4719 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4720 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004721 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004722}
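
// A few concrete instances of the integral conversions above (wN/xN are the
// W/X views of illustrative registers):
//   long -> int:   mov  w0, w1            // Truncation is just a W-register move.
//   int  -> long:  sbfx x0, x1, #0, #32   // Sign-extend the low 32 bits.
//   int  -> short: sbfx w0, w1, #0, #16   // Sign-extend the low 16 bits.
//   int  -> char:  ubfx w0, w1, #0, #16   // Char is unsigned: zero-extend.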
Alexandre Rames67555f72014-11-18 10:55:16 +00004723
Serban Constantinescu02164b32014-11-13 14:05:07 +00004724void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4725 HandleShift(ushr);
4726}
4727
4728void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4729 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004730}
4731
4732void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4733 HandleBinaryOp(instruction);
4734}
4735
4736void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4737 HandleBinaryOp(instruction);
4738}
4739
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004740void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004741 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004742 LOG(FATAL) << "Unreachable";
4743}
4744
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004745void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004746 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004747 LOG(FATAL) << "Unreachable";
4748}
4749
Mark Mendellfe57faa2015-09-18 09:26:15 -04004750// Packed switch: emit a cascaded compare/jump sequence, or a jump table for large switches.
4751void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4752 LocationSummary* locations =
4753 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4754 locations->SetInAt(0, Location::RequiresRegister());
4755}
4756
4757void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4758 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004759 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004760 Register value_reg = InputRegisterAt(switch_instr, 0);
4761 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4762
Zheng Xu3927c8b2015-11-18 17:46:25 +08004763 // Roughly take 16 as the maximum average number of instructions generated per HIR.
4764 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
4765 // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
4766 // to make sure we don't emit a jump table if its targets may be out of range.
4767 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4768 // ranges and emit the tables only as required.
4769 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04004770
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004771 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004772 // Current instruction id is an upper bound of the number of HIRs in the graph.
4773 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4774 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004775 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4776 Register temp = temps.AcquireW();
4777 __ Subs(temp, value_reg, Operand(lower_bound));
4778
Zheng Xu3927c8b2015-11-18 17:46:25 +08004779 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004780 // Jump to successors[0] if value == lower_bound.
4781 __ B(eq, codegen_->GetLabelOf(successors[0]));
4782 int32_t last_index = 0;
4783 for (; num_entries - last_index > 2; last_index += 2) {
4784 __ Subs(temp, temp, Operand(2));
4785 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4786 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4787 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4788 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4789 }
4790 if (num_entries - last_index == 2) {
4791 // The last missing case_value.
4792 __ Cmp(temp, Operand(1));
4793 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004794 }
4795
4796 // And the default for any other value.
4797 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4798 __ B(codegen_->GetLabelOf(default_block));
4799 }
4800 } else {
Alexandre Ramesc393d632016-04-15 11:54:06 +01004801 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004802
4803 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4804
4805 // The instructions below should use at most one blocked register. Since there are two blocked
4806 // registers, we are free to block one here.
4807 Register temp_w = temps.AcquireW();
4808 Register index;
4809 // Remove the bias.
4810 if (lower_bound != 0) {
4811 index = temp_w;
4812 __ Sub(index, value_reg, Operand(lower_bound));
4813 } else {
4814 index = value_reg;
4815 }
4816
4817 // Jump to the default block if the index is out of range.
4818 __ Cmp(index, Operand(num_entries));
4819 __ B(hs, codegen_->GetLabelOf(default_block));
4820
4821 // In the current VIXL implementation, encoding the immediate value for Adr does not require
4822 // any blocked registers, so we are free to use both VIXL blocked registers to reduce
4823 // register pressure.
4824 Register table_base = temps.AcquireX();
4825 // Load jump offset from the table.
4826 __ Adr(table_base, jump_table->GetTableStartLabel());
4827 Register jump_offset = temp_w;
4828 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4829
4830 // Jump to the target block by branching to table_base (PC-relative) + offset.
4831 Register target_address = table_base;
4832 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4833 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04004834 }
4835}
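
// A sketch of the jump-table flavour above (illustrative registers; the
// small-switch flavour is the cascaded subs/b.lo/b.eq sequence instead):
//   sub  w16, wValue, #lower_bound   // Only emitted when lower_bound != 0.
//   cmp  w16, #num_entries
//   b.hs default_block
//   adr  x17, jump_table             // Table of 32-bit offsets, emitted later.
//   ldr  w16, [x17, w16, uxtw #2]    // Offset of the target block.
//   add  x17, x17, w16, sxtw         // Offsets are relative to the table base.
//   br   x17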
4836
Roland Levillain44015862016-01-22 11:47:17 +00004837void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4838 Location out,
4839 uint32_t offset,
4840 Location maybe_temp) {
4841 Primitive::Type type = Primitive::kPrimNot;
4842 Register out_reg = RegisterFrom(out, type);
4843 if (kEmitCompilerReadBarrier) {
4844 Register temp_reg = RegisterFrom(maybe_temp, type);
4845 if (kUseBakerReadBarrier) {
4846 // Load with fast path based Baker's read barrier.
4847 // /* HeapReference<Object> */ out = *(out + offset)
4848 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4849 out,
4850 out_reg,
4851 offset,
4852 temp_reg,
4853 /* needs_null_check */ false,
4854 /* use_load_acquire */ false);
4855 } else {
4856 // Load with slow path based read barrier.
4857 // Save the value of `out` into `maybe_temp` before overwriting it
4858 // in the following move operation, as we will need it for the
4859 // read barrier below.
4860 __ Mov(temp_reg, out_reg);
4861 // /* HeapReference<Object> */ out = *(out + offset)
4862 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4863 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4864 }
4865 } else {
4866 // Plain load with no read barrier.
4867 // /* HeapReference<Object> */ out = *(out + offset)
4868 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4869 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4870 }
4871}
4872
4873void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
4874 Location out,
4875 Location obj,
4876 uint32_t offset,
4877 Location maybe_temp) {
4878 Primitive::Type type = Primitive::kPrimNot;
4879 Register out_reg = RegisterFrom(out, type);
4880 Register obj_reg = RegisterFrom(obj, type);
4881 if (kEmitCompilerReadBarrier) {
4882 if (kUseBakerReadBarrier) {
4883 // Load with fast path based Baker's read barrier.
4884 Register temp_reg = RegisterFrom(maybe_temp, type);
4885 // /* HeapReference<Object> */ out = *(obj + offset)
4886 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4887 out,
4888 obj_reg,
4889 offset,
4890 temp_reg,
4891 /* needs_null_check */ false,
4892 /* use_load_acquire */ false);
4893 } else {
4894 // Load with slow path based read barrier.
4895 // /* HeapReference<Object> */ out = *(obj + offset)
4896 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4897 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
4898 }
4899 } else {
4900 // Plain load with no read barrier.
4901 // /* HeapReference<Object> */ out = *(obj + offset)
4902 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4903 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4904 }
4905}
4906
4907void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
4908 Location root,
4909 vixl::Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004910 uint32_t offset,
4911 vixl::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00004912 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
4913 if (kEmitCompilerReadBarrier) {
4914 if (kUseBakerReadBarrier) {
4915 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
4916 // Baker's read barriers are used:
4917 //
4918 // root = obj.field;
4919 // if (Thread::Current()->GetIsGcMarking()) {
4920 // root = ReadBarrier::Mark(root)
4921 // }
4922
4923 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004924 if (fixup_label == nullptr) {
4925 __ Ldr(root_reg, MemOperand(obj, offset));
4926 } else {
4927 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4928 __ Bind(fixup_label);
4929 __ ldr(root_reg, MemOperand(obj, offset));
4930 }
Roland Levillain44015862016-01-22 11:47:17 +00004931 static_assert(
4932 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
4933 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
4934 "have different sizes.");
4935 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
4936 "art::mirror::CompressedReference<mirror::Object> and int32_t "
4937 "have different sizes.");
4938
4939 // Slow path used to mark the GC root `root`.
4940 SlowPathCodeARM64* slow_path =
4941 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
4942 codegen_->AddSlowPath(slow_path);
4943
4944 MacroAssembler* masm = GetVIXLAssembler();
4945 UseScratchRegisterScope temps(masm);
4946 Register temp = temps.AcquireW();
4947 // temp = Thread::Current()->GetIsGcMarking()
4948 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
4949 __ Cbnz(temp, slow_path->GetEntryLabel());
4950 __ Bind(slow_path->GetExitLabel());
4951 } else {
4952 // GC root loaded through a slow path for read barriers other
4953 // than Baker's.
4954 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004955 if (fixup_label == nullptr) {
4956 __ Add(root_reg.X(), obj.X(), offset);
4957 } else {
4958 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4959 __ Bind(fixup_label);
4960 __ add(root_reg.X(), obj.X(), offset);
4961 }
Roland Levillain44015862016-01-22 11:47:17 +00004962 // /* mirror::Object* */ root = root->Read()
4963 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
4964 }
4965 } else {
4966 // Plain GC root load with no read barrier.
4967 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004968 if (fixup_label == nullptr) {
4969 __ Ldr(root_reg, MemOperand(obj, offset));
4970 } else {
4971 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4972 __ Bind(fixup_label);
4973 __ ldr(root_reg, MemOperand(obj, offset));
4974 }
Roland Levillain44015862016-01-22 11:47:17 +00004975 // Note that GC roots are not affected by heap poisoning, thus we
4976 // do not have to unpoison `root_reg` here.
4977 }
4978}
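
// A sketch of the Baker fast path above for a plain (non-fixup) GC root load
// (illustrative registers):
//   ldr  wRoot, [xObj, #offset]       // root = *(obj + offset)
//   ldr  wTemp, [tr, #is_gc_marking]  // Thread::Current()->GetIsGcMarking()
//   cbnz wTemp, mark_slow_path        // root = ReadBarrier::Mark(root)
//   exit: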
4979
4980void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
4981 Location ref,
4982 vixl::Register obj,
4983 uint32_t offset,
4984 Register temp,
4985 bool needs_null_check,
4986 bool use_load_acquire) {
4987 DCHECK(kEmitCompilerReadBarrier);
4988 DCHECK(kUseBakerReadBarrier);
4989
4990 // /* HeapReference<Object> */ ref = *(obj + offset)
4991 Location no_index = Location::NoLocation();
Roland Levillainb6a94412016-06-23 13:48:47 +01004992 size_t no_scale_factor = 0U;
4993 GenerateReferenceLoadWithBakerReadBarrier(instruction,
4994 ref,
4995 obj,
4996 offset,
4997 no_index,
4998 no_scale_factor,
4999 temp,
5000 needs_null_check,
5001 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005002}
5003
5004void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5005 Location ref,
5006 vixl::Register obj,
5007 uint32_t data_offset,
5008 Location index,
5009 Register temp,
5010 bool needs_null_check) {
5011 DCHECK(kEmitCompilerReadBarrier);
5012 DCHECK(kUseBakerReadBarrier);
5013
5014 // Array cells are never volatile, so array loads
5015 // never use Load-Acquire instructions on ARM64.
5016 const bool use_load_acquire = false;
5017
Roland Levillainb6a94412016-06-23 13:48:47 +01005018 static_assert(
5019 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5020 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005021 // /* HeapReference<Object> */ ref =
5022 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainb6a94412016-06-23 13:48:47 +01005023 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5024 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5025 ref,
5026 obj,
5027 data_offset,
5028 index,
5029 scale_factor,
5030 temp,
5031 needs_null_check,
5032 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005033}
5034
5035void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5036 Location ref,
5037 vixl::Register obj,
5038 uint32_t offset,
5039 Location index,
Roland Levillainb6a94412016-06-23 13:48:47 +01005040 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005041 Register temp,
5042 bool needs_null_check,
5043 bool use_load_acquire) {
5044 DCHECK(kEmitCompilerReadBarrier);
5045 DCHECK(kUseBakerReadBarrier);
Roland Levillainb6a94412016-06-23 13:48:47 +01005046 // If we are emitting an array load, we should not be using a
5047 // Load Acquire instruction. In other words:
5048 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5049 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005050
5051 MacroAssembler* masm = GetVIXLAssembler();
5052 UseScratchRegisterScope temps(masm);
5053
5054 // In slow path based read barriers, the read barrier call is
5055 // inserted after the original load. However, in fast path based
5056 // Baker's read barriers, we need to perform the load of
5057 // mirror::Object::monitor_ *before* the original reference load.
5058 // This load-load ordering is required by the read barrier.
5059 // The fast path/slow path (for Baker's algorithm) should look like:
5060 //
5061 // uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
5062 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5063 // HeapReference<Object> ref = *src; // Original reference load.
5064 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
5065 // if (is_gray) {
5066 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5067 // }
5068 //
5069 // Note: the original implementation in ReadBarrier::Barrier is
5070 // slightly more complex as it performs additional checks that we do
5071 // not do here for performance reasons.
5072
5073 Primitive::Type type = Primitive::kPrimNot;
5074 Register ref_reg = RegisterFrom(ref, type);
5075 DCHECK(obj.IsW());
5076 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5077
5078 // /* int32_t */ monitor = obj->monitor_
5079 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5080 if (needs_null_check) {
5081 MaybeRecordImplicitNullCheck(instruction);
5082 }
5083 // /* LockWord */ lock_word = LockWord(monitor)
5084 static_assert(sizeof(LockWord) == sizeof(int32_t),
5085 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005086
Vladimir Marko836c6542016-07-11 19:30:56 +01005087 // Introduce a dependency on the lock word, including the rb_state,
5088 // to prevent load-load reordering without using
Roland Levillain44015862016-01-22 11:47:17 +00005089 // a memory barrier (which would be more expensive).
Vladimir Marko836c6542016-07-11 19:30:56 +01005090 // `obj` is unchanged by this operation, but its value now depends on `temp`.
5091 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
Roland Levillain44015862016-01-22 11:47:17 +00005092
5093 // The actual reference load.
5094 if (index.IsValid()) {
Roland Levillainb6a94412016-06-23 13:48:47 +01005095 // Load types involving an "index".
5096 if (use_load_acquire) {
5097 // UnsafeGetObjectVolatile intrinsic case.
5098 // Register `index` is not an index in an object array, but an
5099 // offset to an object reference field within object `obj`.
5100 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5101 DCHECK(instruction->GetLocations()->Intrinsified());
5102 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5103 << instruction->AsInvoke()->GetIntrinsic();
5104 DCHECK_EQ(offset, 0U);
5105 DCHECK_EQ(scale_factor, 0U);
5106 DCHECK_EQ(needs_null_check, 0U);
5107 // /* HeapReference<Object> */ ref = *(obj + index)
5108 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5109 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005110 } else {
Roland Levillainb6a94412016-06-23 13:48:47 +01005111 // ArrayGet and UnsafeGetObject intrinsics cases.
5112 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5113 if (index.IsConstant()) {
5114 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5115 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5116 } else {
Vladimir Marko836c6542016-07-11 19:30:56 +01005117 Register temp2 = temps.AcquireW();
Roland Levillainb6a94412016-06-23 13:48:47 +01005118 __ Add(temp2, obj, offset);
5119 Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
5120 temps.Release(temp2);
5121 }
Roland Levillain44015862016-01-22 11:47:17 +00005122 }
Roland Levillain44015862016-01-22 11:47:17 +00005123 } else {
5124 // /* HeapReference<Object> */ ref = *(obj + offset)
5125 MemOperand field = HeapOperand(obj, offset);
5126 if (use_load_acquire) {
5127 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5128 } else {
5129 Load(type, ref_reg, field);
5130 }
5131 }
5132
5133 // Object* ref = ref_addr->AsMirrorPtr()
5134 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5135
5136 // Slow path used to mark the object `ref` when it is gray.
5137 SlowPathCodeARM64* slow_path =
5138 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
5139 AddSlowPath(slow_path);
5140
5141 // if (rb_state == ReadBarrier::gray_ptr_)
5142 // ref = ReadBarrier::Mark(ref);
Vladimir Marko836c6542016-07-11 19:30:56 +01005143 // Given the numeric representation, it's enough to check the low bit of the rb_state.
5144 static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
5145 static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
5146 static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
5147 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005148 __ Bind(slow_path->GetExitLabel());
5149}
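
// Putting it together, the Baker field-load fast path above is roughly
// (illustrative registers; no index, heap poisoning disabled):
//   ldr  wTemp, [xObj, #monitor_offset]  // Lock word; rb_state in the high bits.
//   add  xObj, xObj, xTemp, lsr #32      // Adds 0, but orders the two loads.
//   ldr  wRef, [xObj, #offset]           // The original reference load.
//   tbnz wTemp, #rb_state_shift, mark_slow_path  // Gray object? Mark wRef.
//   exit: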
5150
5151void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5152 Location out,
5153 Location ref,
5154 Location obj,
5155 uint32_t offset,
5156 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005157 DCHECK(kEmitCompilerReadBarrier);
5158
Roland Levillain44015862016-01-22 11:47:17 +00005159 // Insert a slow path based read barrier *after* the reference load.
5160 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005161 // If heap poisoning is enabled, the unpoisoning of the loaded
5162 // reference will be carried out by the runtime within the slow
5163 // path.
5164 //
5165 // Note that `ref` currently does not get unpoisoned (when heap
5166 // poisoning is enabled), which is alright as the `ref` argument is
5167 // not used by the artReadBarrierSlow entry point.
5168 //
5169 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5170 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5171 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5172 AddSlowPath(slow_path);
5173
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005174 __ B(slow_path->GetEntryLabel());
5175 __ Bind(slow_path->GetExitLabel());
5176}
5177
Roland Levillain44015862016-01-22 11:47:17 +00005178void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5179 Location out,
5180 Location ref,
5181 Location obj,
5182 uint32_t offset,
5183 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005184 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005185 // Baker's read barriers shall be handled by the fast path
5186 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5187 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005188 // If heap poisoning is enabled, unpoisoning will be taken care of
5189 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005190 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005191 } else if (kPoisonHeapReferences) {
5192 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5193 }
5194}
5195
Roland Levillain44015862016-01-22 11:47:17 +00005196void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5197 Location out,
5198 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005199 DCHECK(kEmitCompilerReadBarrier);
5200
Roland Levillain44015862016-01-22 11:47:17 +00005201 // Insert a slow path based read barrier *after* the GC root load.
5202 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005203 // Note that GC roots are not affected by heap poisoning, so we do
5204 // not need to do anything special for this here.
5205 SlowPathCodeARM64* slow_path =
5206 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5207 AddSlowPath(slow_path);
5208
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005209 __ B(slow_path->GetEntryLabel());
5210 __ Bind(slow_path->GetExitLabel());
5211}
5212
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005213void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5214 LocationSummary* locations =
5215 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5216 locations->SetInAt(0, Location::RequiresRegister());
5217 locations->SetOut(Location::RequiresRegister());
5218}
5219
5220void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5221 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005222 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayb3cd84a2016-07-13 14:13:48 +01005223 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005224 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayb3cd84a2016-07-13 14:13:48 +01005225 __ Ldr(XRegisterFrom(locations->Out()),
5226 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005227 } else {
Nicolas Geoffrayb3cd84a2016-07-13 14:13:48 +01005228 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
5229 instruction->GetIndex() % ImTable::kSize, kArm64PointerSize));
Artem Udovichenkodf2d4f22016-06-30 09:18:25 +00005230 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5231 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayb3cd84a2016-07-13 14:13:48 +01005232 __ Ldr(XRegisterFrom(locations->Out()),
5233 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005234 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005235}
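
// For reference, the two cases above load the target ArtMethod* as follows
// (illustrative registers):
//   vtable: ldr x0, [xClass, #embedded_vtable_entry_offset]  // Single load.
//   IMT:    ldr x0, [xClass, #imt_ptr_offset]                // Class::imt_.
//           ldr x0, [x0, #imt_entry_offset]                  // The table slot.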
5236
5237
5238
Alexandre Rames67555f72014-11-18 10:55:16 +00005239#undef __
5240#undef QUICK_ENTRY_POINT
5241
Alexandre Rames5319def2014-10-23 10:03:10 +01005242} // namespace arm64
5243} // namespace art