/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
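// Rough break-even check for the threshold above (illustrative arithmetic only): at
// num_entries == 7 the compare/jump sequence costs about 1.5 * 7 + 3 ~= 14 instructions, while
// the jump table costs 7 instructions plus 7 int32 literals, i.e. roughly 14 words of
// code/data, so smaller switches favor the compare/jump sequence.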

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
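// Note on the gt_bias cases above (this assumes the usual AArch64 behavior where an unordered
// FP compare sets C and V and clears N and Z): `lt` reads as true for unordered inputs while
// `cc`/`lo` reads as false, and symmetrically for the other pairs, which is how the bias
// selects whether a NaN operand makes the condition hold or fail.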

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operands used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating point registers spill area).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
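// Illustrative example for the helper above (the register choices are hypothetical): with
// core_spills = {x0, x1} and fp_spills = {d0}, core_spill_size is 16, so x0 and x1 are stored
// at [base + spill_offset] and [base + spill_offset + 8], and d0 at
// [base + spill_offset + 16]: core registers first, FP registers immediately after them.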

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};
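// Like the other fatal slow paths below, BoundsCheckSlowPathARM64 never returns to the fast
// path: IsFatal() is true, the runtime call throws, and no branch back to an exit label is
// emitted. Non-fatal slow paths (e.g. the type-check path used for instanceof) instead save
// and restore live registers around the runtime call and end with `__ B(GetExitLabel())`.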

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
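// The table emitted above is just a run of signed 32-bit literals, one per switch target, each
// holding the byte offset from `table_start_` to the target block's bound label; the
// packed-switch code that consumes it is expected to load the selected entry and add it to the
// table address to form the branch target (that consumer is not part of this excerpt).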

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary; it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};
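// The entry point offset computed above is expected to select one of the per-register
// ReadBarrierMarkRegX entrypoints in the Thread structure, indexed by the code of the register
// holding `obj`; that per-register dispatch is what makes the "compact" in-place calling
// convention described in the comments possible.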

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};
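// Worked example of the offset computation in the ArrayGet branch above (the concrete values
// are hypothetical): for a reference array element at index 3 with a data offset of 12 bytes,
// index_reg becomes (3 << 2) + 12 = 24, which is then passed to kQuickReadBarrierSlow as the
// combined offset argument.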

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5 VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8] : lr.
    //   ...                : other preserved core registers.
    //   ...                : other preserved fp registers.
    //   ...                : reserved frame space.
    //   sp[0]              : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
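// Note on the frame setup above: the single `Str ... MemOperand(sp, -frame_size, PreIndex)`
// both allocates the whole frame (sp -= frame_size) and stores the current ArtMethod* at
// sp[0], matching the stack layout documented in the comment; the callee-saved core and FP
// registers are then spilled into the top of that already-reserved frame.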

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}
1025
Scott Wakeling97c72b72016-06-24 16:19:36 +01001026CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001027 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001028 return CPURegList(CPURegister::kRegister, kXRegSize,
1029 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001030}
1031
Scott Wakeling97c72b72016-06-24 16:19:36 +01001032CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001033 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1034 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001035 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1036 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001037}
1038
Alexandre Rames5319def2014-10-23 10:03:10 +01001039void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1040 __ Bind(GetLabelOf(block));
1041}
1042
Calin Juravle175dc732015-08-25 15:42:32 +01001043void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1044 DCHECK(location.IsRegister());
1045 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1046}
1047
Calin Juravlee460d1d2015-09-29 04:52:17 +01001048void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1049 if (location.IsRegister()) {
1050 locations->AddTemp(location);
1051 } else {
1052 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1053 }
1054}
1055
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001056void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001057 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001058 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001059 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001060 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001061 if (value_can_be_null) {
1062 __ Cbz(value, &done);
1063 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001064 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001065 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001066 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001067 if (value_can_be_null) {
1068 __ Bind(&done);
1069 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001070}
1071
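// A minimal sketch (not used by the generated code) of the card marking
// arithmetic emitted above: the card for `object` lives at
// card_table_base + (object >> kCardShift), and the Strb stores the low byte
// of the card table base at that address as the "dirty" marker. The helper
// below is illustrative only; `card_shift` stands in for
// gc::accounting::CardTable::kCardShift.
static uint8_t* CardAddressSketch(uint8_t* card_table_base,
                                  uintptr_t object_address,
                                  size_t card_shift) {
  return card_table_base + (object_address >> card_shift);
}
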
David Brazdil58282f42016-01-14 12:45:10 +00001072void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001073 // Blocked core registers:
1074 // lr : Runtime reserved.
1075 // tr : Runtime reserved.
1076 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1077 // ip1 : VIXL core temp.
1078 // ip0 : VIXL core temp.
1079 //
1080 // Blocked fp registers:
1081 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001082 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1083 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001084 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001085 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001086 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001087
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001088 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001089 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001090 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001091 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001092
David Brazdil58282f42016-01-14 12:45:10 +00001093 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001094 // Stubs do not save callee-save floating point registers. If the graph
1095 // is debuggable, we need to deal with these registers differently. For
1096 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001097 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1098 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001099 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001100 }
1101 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001102}
1103
Alexandre Rames3e69f162014-12-10 10:36:50 +00001104size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1105 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1106 __ Str(reg, MemOperand(sp, stack_index));
1107 return kArm64WordSize;
1108}
1109
1110size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1111 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1112 __ Ldr(reg, MemOperand(sp, stack_index));
1113 return kArm64WordSize;
1114}
1115
1116size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1117 FPRegister reg = FPRegister(reg_id, kDRegSize);
1118 __ Str(reg, MemOperand(sp, stack_index));
1119 return kArm64WordSize;
1120}
1121
1122size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1123 FPRegister reg = FPRegister(reg_id, kDRegSize);
1124 __ Ldr(reg, MemOperand(sp, stack_index));
1125 return kArm64WordSize;
1126}
1127
Alexandre Rames5319def2014-10-23 10:03:10 +01001128void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001129 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001130}
1131
1132void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001133 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001134}
1135
Alexandre Rames67555f72014-11-18 10:55:16 +00001136void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001137 if (constant->IsIntConstant()) {
1138 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1139 } else if (constant->IsLongConstant()) {
1140 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1141 } else if (constant->IsNullConstant()) {
1142 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001143 } else if (constant->IsFloatConstant()) {
1144 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1145 } else {
1146 DCHECK(constant->IsDoubleConstant());
1147 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1148 }
1149}
1150
Alexandre Rames3e69f162014-12-10 10:36:50 +00001151
1152static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1153 DCHECK(constant.IsConstant());
1154 HConstant* cst = constant.GetConstant();
1155 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001156 // Null is mapped to a core W register, which we associate with kPrimInt.
1157 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001158 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1159 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1160 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1161}
1162
Calin Juravlee460d1d2015-09-29 04:52:17 +01001163void CodeGeneratorARM64::MoveLocation(Location destination,
1164 Location source,
1165 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001166 if (source.Equals(destination)) {
1167 return;
1168 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001169
1170 // A valid move can always be inferred from the destination and source
1171 // locations. When moving from and to a register, the argument type can be
1172 // used to generate 32bit instead of 64bit moves. In debug mode we also
1173 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001174 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001175
1176 if (destination.IsRegister() || destination.IsFpuRegister()) {
1177 if (unspecified_type) {
1178 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1179 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001180 (src_cst != nullptr && (src_cst->IsIntConstant()
1181 || src_cst->IsFloatConstant()
1182 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001183 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001184 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001185 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001186 // If the source is a double stack slot or a 64bit constant, a 64bit
1187 // type is appropriate. Otherwise the source is a register, and since the
1188 // type has not been specified, we choose a 64bit type to force a 64bit
1189 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001190 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001191 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001192 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001193 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1194 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1195 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001196 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1197 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1198 __ Ldr(dst, StackOperandFrom(source));
1199 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001200 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001201 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001202 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001203 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001204 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001205 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001206 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001207 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1208 ? Primitive::kPrimLong
1209 : Primitive::kPrimInt;
1210 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1211 }
1212 } else {
1213 DCHECK(source.IsFpuRegister());
1214 if (destination.IsRegister()) {
1215 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1216 ? Primitive::kPrimDouble
1217 : Primitive::kPrimFloat;
1218 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1219 } else {
1220 DCHECK(destination.IsFpuRegister());
1221 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001222 }
1223 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001224 } else { // The destination is not a register. It must be a stack slot.
1225 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1226 if (source.IsRegister() || source.IsFpuRegister()) {
1227 if (unspecified_type) {
1228 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001229 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001230 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001231 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001232 }
1233 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001234 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1235 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1236 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001237 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001238 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1239 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001240 UseScratchRegisterScope temps(GetVIXLAssembler());
1241 HConstant* src_cst = source.GetConstant();
1242 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001243 if (src_cst->IsZeroBitPattern()) {
1244 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001245 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001246 if (src_cst->IsIntConstant()) {
1247 temp = temps.AcquireW();
1248 } else if (src_cst->IsLongConstant()) {
1249 temp = temps.AcquireX();
1250 } else if (src_cst->IsFloatConstant()) {
1251 temp = temps.AcquireS();
1252 } else {
1253 DCHECK(src_cst->IsDoubleConstant());
1254 temp = temps.AcquireD();
1255 }
1256 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001257 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001258 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001259 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001260 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001261 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001262 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001263 // There is generally less pressure on FP registers.
1264 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001265 __ Ldr(temp, StackOperandFrom(source));
1266 __ Str(temp, StackOperandFrom(destination));
1267 }
1268 }
1269}
1270
1271void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001272 CPURegister dst,
1273 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001274 switch (type) {
1275 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001276 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001277 break;
1278 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001279 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001280 break;
1281 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001282 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001283 break;
1284 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001285 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001286 break;
1287 case Primitive::kPrimInt:
1288 case Primitive::kPrimNot:
1289 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001290 case Primitive::kPrimFloat:
1291 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001292 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001293 __ Ldr(dst, src);
1294 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001295 case Primitive::kPrimVoid:
1296 LOG(FATAL) << "Unreachable type " << type;
1297 }
1298}
1299
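// A minimal sketch (not used by the generated code) of the extension choices
// above: byte and short are signed Java types and use sign-extending loads
// (Ldrsb/Ldrsh), while boolean and char are unsigned and use zero-extending
// loads (Ldrb/Ldrh). Plain C++ integer conversions show the same effect.
static int32_t LoadSignedByteSketch(int8_t memory_value) {
  return memory_value;  // Sign-extends, like Ldrsb.
}
static int32_t LoadCharSketch(uint16_t memory_value) {
  return memory_value;  // Zero-extends, like Ldrh.
}
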
Calin Juravle77520bc2015-01-12 18:45:46 +00001300void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001301 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001302 const MemOperand& src,
1303 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001304 MacroAssembler* masm = GetVIXLAssembler();
1305 BlockPoolsScope block_pools(masm);
1306 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001307 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001308 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001309
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001310 DCHECK(!src.IsPreIndex());
1311 DCHECK(!src.IsPostIndex());
1312
1313 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001314 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001315 MemOperand base = MemOperand(temp_base);
1316 switch (type) {
1317 case Primitive::kPrimBoolean:
1318 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001319 if (needs_null_check) {
1320 MaybeRecordImplicitNullCheck(instruction);
1321 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001322 break;
1323 case Primitive::kPrimByte:
1324 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001325 if (needs_null_check) {
1326 MaybeRecordImplicitNullCheck(instruction);
1327 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001328 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1329 break;
1330 case Primitive::kPrimChar:
1331 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001332 if (needs_null_check) {
1333 MaybeRecordImplicitNullCheck(instruction);
1334 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001335 break;
1336 case Primitive::kPrimShort:
1337 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001338 if (needs_null_check) {
1339 MaybeRecordImplicitNullCheck(instruction);
1340 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001341 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1342 break;
1343 case Primitive::kPrimInt:
1344 case Primitive::kPrimNot:
1345 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001346 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001347 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001348 if (needs_null_check) {
1349 MaybeRecordImplicitNullCheck(instruction);
1350 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001351 break;
1352 case Primitive::kPrimFloat:
1353 case Primitive::kPrimDouble: {
1354 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001355 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001356
1357 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1358 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001359 if (needs_null_check) {
1360 MaybeRecordImplicitNullCheck(instruction);
1361 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001362 __ Fmov(FPRegister(dst), temp);
1363 break;
1364 }
1365 case Primitive::kPrimVoid:
1366 LOG(FATAL) << "Unreachable type " << type;
1367 }
1368}
1369
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001370void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001371 CPURegister src,
1372 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001373 switch (type) {
1374 case Primitive::kPrimBoolean:
1375 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001376 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001377 break;
1378 case Primitive::kPrimChar:
1379 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001380 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001381 break;
1382 case Primitive::kPrimInt:
1383 case Primitive::kPrimNot:
1384 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001385 case Primitive::kPrimFloat:
1386 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001387 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001388 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001389 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001390 case Primitive::kPrimVoid:
1391 LOG(FATAL) << "Unreachable type " << type;
1392 }
1393}
1394
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001395void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1396 CPURegister src,
1397 const MemOperand& dst) {
1398 UseScratchRegisterScope temps(GetVIXLAssembler());
1399 Register temp_base = temps.AcquireX();
1400
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001401 DCHECK(!dst.IsPreIndex());
1402 DCHECK(!dst.IsPostIndex());
1403
1404 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001405 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001406 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001407 MemOperand base = MemOperand(temp_base);
1408 switch (type) {
1409 case Primitive::kPrimBoolean:
1410 case Primitive::kPrimByte:
1411 __ Stlrb(Register(src), base);
1412 break;
1413 case Primitive::kPrimChar:
1414 case Primitive::kPrimShort:
1415 __ Stlrh(Register(src), base);
1416 break;
1417 case Primitive::kPrimInt:
1418 case Primitive::kPrimNot:
1419 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001420 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001421 __ Stlr(Register(src), base);
1422 break;
1423 case Primitive::kPrimFloat:
1424 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001425 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001426 Register temp_src;
1427 if (src.IsZero()) {
1428 // The zero register is used to avoid synthesizing zero constants.
1429 temp_src = Register(src);
1430 } else {
1431 DCHECK(src.IsFPRegister());
1432 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1433 __ Fmov(temp_src, FPRegister(src));
1434 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001435
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001436 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001437 break;
1438 }
1439 case Primitive::kPrimVoid:
1440 LOG(FATAL) << "Unreachable type " << type;
1441 }
1442}
1443
Calin Juravle175dc732015-08-25 15:42:32 +01001444void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1445 HInstruction* instruction,
1446 uint32_t dex_pc,
1447 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001448 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001449 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001450 if (EntrypointRequiresStackMap(entrypoint)) {
1451 RecordPcInfo(instruction, dex_pc, slow_path);
1452 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001453}
1454
Roland Levillaindec8f632016-07-22 17:10:06 +01001455void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1456 HInstruction* instruction,
1457 SlowPathCode* slow_path) {
1458 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001459 GenerateInvokeRuntime(entry_point_offset);
1460}
1461
1462void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001463 BlockPoolsScope block_pools(GetVIXLAssembler());
1464 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1465 __ Blr(lr);
1466}
1467
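// A minimal sketch (not used by the generated code) of the call emitted by
// GenerateInvokeRuntime above: a quick entrypoint is a function pointer
// stored at a fixed offset from the Thread object held in the reserved
// thread register, so the call is "load from tr + offset, then Blr". The
// parameter types below are illustrative; ART's real Thread layout is not
// shown here.
static void InvokeRuntimeSketch(const uint8_t* thread_self, int32_t entry_point_offset) {
  using EntrypointFn = void (*)();
  EntrypointFn fn =
      *reinterpret_cast<const EntrypointFn*>(thread_self + entry_point_offset);
  fn();  // Corresponds to "Ldr lr, [tr, #offset]; Blr lr".
}
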
Alexandre Rames67555f72014-11-18 10:55:16 +00001468void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001469 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001470 UseScratchRegisterScope temps(GetVIXLAssembler());
1471 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001472 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1473
Serban Constantinescu02164b32014-11-13 14:05:07 +00001474 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001475 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1476 __ Add(temp, class_reg, status_offset);
1477 __ Ldar(temp, HeapOperand(temp));
1478 __ Cmp(temp, mirror::Class::kStatusInitialized);
1479 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001480 __ Bind(slow_path->GetExitLabel());
1481}
Alexandre Rames5319def2014-10-23 10:03:10 +01001482
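// A minimal sketch (not used by the generated code) of the test above: the
// class status is read with acquire semantics (Ldar) and any value below
// "initialized" diverts to the slow path, so that once a class is observed
// as initialized all of its static fields are visible too.
static bool NeedsClinitSlowPathSketch(int32_t acquired_class_status) {
  return acquired_class_status < static_cast<int32_t>(mirror::Class::kStatusInitialized);
}
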
Roland Levillain44015862016-01-22 11:47:17 +00001483void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001484 BarrierType type = BarrierAll;
1485
1486 switch (kind) {
1487 case MemBarrierKind::kAnyAny:
1488 case MemBarrierKind::kAnyStore: {
1489 type = BarrierAll;
1490 break;
1491 }
1492 case MemBarrierKind::kLoadAny: {
1493 type = BarrierReads;
1494 break;
1495 }
1496 case MemBarrierKind::kStoreStore: {
1497 type = BarrierWrites;
1498 break;
1499 }
1500 default:
1501 LOG(FATAL) << "Unexpected memory barrier " << kind;
1502 }
1503 __ Dmb(InnerShareable, type);
1504}
1505
Serban Constantinescu02164b32014-11-13 14:05:07 +00001506void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1507 HBasicBlock* successor) {
1508 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001509 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1510 if (slow_path == nullptr) {
1511 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1512 instruction->SetSlowPath(slow_path);
1513 codegen_->AddSlowPath(slow_path);
1514 if (successor != nullptr) {
1515 DCHECK(successor->IsLoopHeader());
1516 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1517 }
1518 } else {
1519 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1520 }
1521
Serban Constantinescu02164b32014-11-13 14:05:07 +00001522 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1523 Register temp = temps.AcquireW();
1524
Andreas Gampe542451c2016-07-26 09:02:02 -07001525 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001526 if (successor == nullptr) {
1527 __ Cbnz(temp, slow_path->GetEntryLabel());
1528 __ Bind(slow_path->GetReturnLabel());
1529 } else {
1530 __ Cbz(temp, codegen_->GetLabelOf(successor));
1531 __ B(slow_path->GetEntryLabel());
1532 // slow_path will return to GetLabelOf(successor).
1533 }
1534}
1535
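// A minimal sketch (not used by the generated code) of the test above: the
// 16-bit thread flags are loaded from the Thread object and any non-zero
// value (a suspend or checkpoint request) routes execution to the slow path.
static bool SuspendRequestPendingSketch(uint16_t thread_flags) {
  return thread_flags != 0u;  // Matches the Cbnz / Cbz on the loaded flags above.
}
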
Alexandre Rames5319def2014-10-23 10:03:10 +01001536InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1537 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001538 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001539 assembler_(codegen->GetAssembler()),
1540 codegen_(codegen) {}
1541
1542#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001543 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001544
1545#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1546
1547enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001548 // Using a base helps identify when we hit such breakpoints.
1549 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001550#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1551 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1552#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1553};
1554
1555#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001556 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001557 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1558 } \
1559 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1560 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1561 locations->SetOut(Location::Any()); \
1562 }
1563 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1564#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1565
1566#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001567#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001568
Alexandre Rames67555f72014-11-18 10:55:16 +00001569void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001570 DCHECK_EQ(instr->InputCount(), 2U);
1571 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1572 Primitive::Type type = instr->GetResultType();
1573 switch (type) {
1574 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001575 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001576 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001577 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001578 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001579 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001580
1581 case Primitive::kPrimFloat:
1582 case Primitive::kPrimDouble:
1583 locations->SetInAt(0, Location::RequiresFpuRegister());
1584 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001585 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001586 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001587
Alexandre Rames5319def2014-10-23 10:03:10 +01001588 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001589 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001590 }
1591}
1592
Alexandre Rames09a99962015-04-15 11:47:56 +01001593void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001594 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1595
1596 bool object_field_get_with_read_barrier =
1597 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001598 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001599 new (GetGraph()->GetArena()) LocationSummary(instruction,
1600 object_field_get_with_read_barrier ?
1601 LocationSummary::kCallOnSlowPath :
1602 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001603 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001604 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01001605 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001606 locations->SetInAt(0, Location::RequiresRegister());
1607 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1608 locations->SetOut(Location::RequiresFpuRegister());
1609 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001610 // The output overlaps for an object field get when read barriers
1611 // are enabled: we do not want the load to overwrite the object's
1612 // location, as we need it to emit the read barrier.
1613 locations->SetOut(
1614 Location::RequiresRegister(),
1615 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001616 }
1617}
1618
1619void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1620 const FieldInfo& field_info) {
1621 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001622 LocationSummary* locations = instruction->GetLocations();
1623 Location base_loc = locations->InAt(0);
1624 Location out = locations->Out();
1625 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001626 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001627 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001628 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001629
Roland Levillain44015862016-01-22 11:47:17 +00001630 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1631 // Object FieldGet with Baker's read barrier case.
1632 MacroAssembler* masm = GetVIXLAssembler();
1633 UseScratchRegisterScope temps(masm);
1634 // /* HeapReference<Object> */ out = *(base + offset)
1635 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1636 Register temp = temps.AcquireW();
1637 // Note that potential implicit null checks are handled in this
1638 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1639 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1640 instruction,
1641 out,
1642 base,
1643 offset,
1644 temp,
1645 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001646 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001647 } else {
1648 // General case.
1649 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001650 // Note that a potential implicit null check is handled in this
1651 // CodeGeneratorARM64::LoadAcquire call.
1652 // NB: LoadAcquire will record the pc info if needed.
1653 codegen_->LoadAcquire(
1654 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001655 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001656 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001657 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001658 }
Roland Levillain44015862016-01-22 11:47:17 +00001659 if (field_type == Primitive::kPrimNot) {
1660 // If read barriers are enabled, emit read barriers other than
1661 // Baker's using a slow path (and also unpoison the loaded
1662 // reference, if heap poisoning is enabled).
1663 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1664 }
Roland Levillain4d027112015-07-01 15:41:14 +01001665 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001666}
1667
1668void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1669 LocationSummary* locations =
1670 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1671 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001672 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1673 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1674 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001675 locations->SetInAt(1, Location::RequiresFpuRegister());
1676 } else {
1677 locations->SetInAt(1, Location::RequiresRegister());
1678 }
1679}
1680
1681void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001682 const FieldInfo& field_info,
1683 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001684 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001685 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001686
1687 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001688 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001689 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001690 Offset offset = field_info.GetFieldOffset();
1691 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001692
Roland Levillain4d027112015-07-01 15:41:14 +01001693 {
1694 // We use a block to end the scratch scope before the write barrier, thus
1695 // freeing the temporary registers so they can be used in `MarkGCCard`.
1696 UseScratchRegisterScope temps(GetVIXLAssembler());
1697
1698 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1699 DCHECK(value.IsW());
1700 Register temp = temps.AcquireW();
1701 __ Mov(temp, value.W());
1702 GetAssembler()->PoisonHeapReference(temp.W());
1703 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001704 }
Roland Levillain4d027112015-07-01 15:41:14 +01001705
1706 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001707 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1708 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001709 } else {
1710 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1711 codegen_->MaybeRecordImplicitNullCheck(instruction);
1712 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001713 }
1714
1715 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001716 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001717 }
1718}
1719
Alexandre Rames67555f72014-11-18 10:55:16 +00001720void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001721 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001722
1723 switch (type) {
1724 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001725 case Primitive::kPrimLong: {
1726 Register dst = OutputRegister(instr);
1727 Register lhs = InputRegisterAt(instr, 0);
1728 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001729 if (instr->IsAdd()) {
1730 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001731 } else if (instr->IsAnd()) {
1732 __ And(dst, lhs, rhs);
1733 } else if (instr->IsOr()) {
1734 __ Orr(dst, lhs, rhs);
1735 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001736 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001737 } else if (instr->IsRor()) {
1738 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001739 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001740 __ Ror(dst, lhs, shift);
1741 } else {
1742 // Ensure the shift distance is in a register of the same size as the result. If
1743 // we are rotating a long and the shift distance originally comes in a w register,
1744 // there is no need to sxtw it for use as an x register, since shift distances
1745 // are always masked with (reg_bits - 1).
1746 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1747 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001748 } else {
1749 DCHECK(instr->IsXor());
1750 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001751 }
1752 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001753 }
1754 case Primitive::kPrimFloat:
1755 case Primitive::kPrimDouble: {
1756 FPRegister dst = OutputFPRegister(instr);
1757 FPRegister lhs = InputFPRegisterAt(instr, 0);
1758 FPRegister rhs = InputFPRegisterAt(instr, 1);
1759 if (instr->IsAdd()) {
1760 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001761 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001762 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001763 } else {
1764 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001765 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001766 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001767 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001768 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001769 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001770 }
1771}
1772
Serban Constantinescu02164b32014-11-13 14:05:07 +00001773void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1774 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1775
1776 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1777 Primitive::Type type = instr->GetResultType();
1778 switch (type) {
1779 case Primitive::kPrimInt:
1780 case Primitive::kPrimLong: {
1781 locations->SetInAt(0, Location::RequiresRegister());
1782 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1783 locations->SetOut(Location::RequiresRegister());
1784 break;
1785 }
1786 default:
1787 LOG(FATAL) << "Unexpected shift type " << type;
1788 }
1789}
1790
1791void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1792 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1793
1794 Primitive::Type type = instr->GetType();
1795 switch (type) {
1796 case Primitive::kPrimInt:
1797 case Primitive::kPrimLong: {
1798 Register dst = OutputRegister(instr);
1799 Register lhs = InputRegisterAt(instr, 0);
1800 Operand rhs = InputOperandAt(instr, 1);
1801 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001802 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001803 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001804 if (instr->IsShl()) {
1805 __ Lsl(dst, lhs, shift_value);
1806 } else if (instr->IsShr()) {
1807 __ Asr(dst, lhs, shift_value);
1808 } else {
1809 __ Lsr(dst, lhs, shift_value);
1810 }
1811 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001812 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001813
1814 if (instr->IsShl()) {
1815 __ Lsl(dst, lhs, rhs_reg);
1816 } else if (instr->IsShr()) {
1817 __ Asr(dst, lhs, rhs_reg);
1818 } else {
1819 __ Lsr(dst, lhs, rhs_reg);
1820 }
1821 }
1822 break;
1823 }
1824 default:
1825 LOG(FATAL) << "Unexpected shift operation type " << type;
1826 }
1827}
1828
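// A minimal sketch (not used by the generated code) of the Java shift
// semantics implemented above: the shift distance is masked to the width of
// the shifted value (31 for int, 63 for long), both for the immediate and
// for the register forms. Unsigned arithmetic is used below only to keep the
// C++ sketch free of undefined behaviour.
static int32_t JavaShlIntSketch(int32_t value, int32_t distance) {
  return static_cast<int32_t>(
      static_cast<uint32_t>(value) << (distance & kMaxIntShiftDistance));
}
static int64_t JavaShlLongSketch(int64_t value, int32_t distance) {
  return static_cast<int64_t>(
      static_cast<uint64_t>(value) << (distance & kMaxLongShiftDistance));
}
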
Alexandre Rames5319def2014-10-23 10:03:10 +01001829void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001830 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001831}
1832
1833void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001834 HandleBinaryOp(instruction);
1835}
1836
1837void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1838 HandleBinaryOp(instruction);
1839}
1840
1841void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1842 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001843}
1844
Artem Serov7fc63502016-02-09 17:15:29 +00001845void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001846 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1847 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1848 locations->SetInAt(0, Location::RequiresRegister());
1849 // There is no immediate variant of negated bitwise instructions in AArch64.
1850 locations->SetInAt(1, Location::RequiresRegister());
1851 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1852}
1853
Artem Serov7fc63502016-02-09 17:15:29 +00001854void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001855 Register dst = OutputRegister(instr);
1856 Register lhs = InputRegisterAt(instr, 0);
1857 Register rhs = InputRegisterAt(instr, 1);
1858
1859 switch (instr->GetOpKind()) {
1860 case HInstruction::kAnd:
1861 __ Bic(dst, lhs, rhs);
1862 break;
1863 case HInstruction::kOr:
1864 __ Orn(dst, lhs, rhs);
1865 break;
1866 case HInstruction::kXor:
1867 __ Eon(dst, lhs, rhs);
1868 break;
1869 default:
1870 LOG(FATAL) << "Unreachable";
1871 }
1872}
1873
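// A minimal sketch (not used by the generated code) of what the three
// negated-operand instructions selected above compute: Bic is "and not",
// Orn is "or not" and Eon is "exclusive-or not" of the second operand.
static uint64_t BicSketch(uint64_t lhs, uint64_t rhs) { return lhs & ~rhs; }
static uint64_t OrnSketch(uint64_t lhs, uint64_t rhs) { return lhs | ~rhs; }
static uint64_t EonSketch(uint64_t lhs, uint64_t rhs) { return lhs ^ ~rhs; }
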
Alexandre Rames8626b742015-11-25 16:28:08 +00001874void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1875 HArm64DataProcWithShifterOp* instruction) {
1876 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1877 instruction->GetType() == Primitive::kPrimLong);
1878 LocationSummary* locations =
1879 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1880 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1881 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1882 } else {
1883 locations->SetInAt(0, Location::RequiresRegister());
1884 }
1885 locations->SetInAt(1, Location::RequiresRegister());
1886 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1887}
1888
1889void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1890 HArm64DataProcWithShifterOp* instruction) {
1891 Primitive::Type type = instruction->GetType();
1892 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1893 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1894 Register out = OutputRegister(instruction);
1895 Register left;
1896 if (kind != HInstruction::kNeg) {
1897 left = InputRegisterAt(instruction, 0);
1898 }
1899 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion into the
1900 // shifter operand operation, the IR that produces `right_reg` (the input to the type
1901 // conversion) can have a different type from the current instruction's type,
1902 // so we manually indicate the type.
1903 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001904 int64_t shift_amount = instruction->GetShiftAmount() &
1905 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001906
1907 Operand right_operand(0);
1908
1909 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1910 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1911 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1912 } else {
1913 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1914 }
1915
1916 // Logical binary operations do not support extension operations in the
1917 // operand. Note that VIXL would still handle one if it were passed, by
1918 // generating the extension as a separate instruction.
1919 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1920 DCHECK(!right_operand.IsExtendedRegister() ||
1921 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1922 kind != HInstruction::kNeg));
1923 switch (kind) {
1924 case HInstruction::kAdd:
1925 __ Add(out, left, right_operand);
1926 break;
1927 case HInstruction::kAnd:
1928 __ And(out, left, right_operand);
1929 break;
1930 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001931 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001932 __ Neg(out, right_operand);
1933 break;
1934 case HInstruction::kOr:
1935 __ Orr(out, left, right_operand);
1936 break;
1937 case HInstruction::kSub:
1938 __ Sub(out, left, right_operand);
1939 break;
1940 case HInstruction::kXor:
1941 __ Eor(out, left, right_operand);
1942 break;
1943 default:
1944 LOG(FATAL) << "Unexpected operation kind: " << kind;
1945 UNREACHABLE();
1946 }
1947}
1948
Artem Serov328429f2016-07-06 16:23:04 +01001949void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00001950 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1951 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001952 LocationSummary* locations =
1953 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1954 locations->SetInAt(0, Location::RequiresRegister());
1955 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1956 locations->SetOut(Location::RequiresRegister());
1957}
1958
Roland Levillain4a3aa572016-08-15 13:17:06 +00001959void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
1960 HIntermediateAddress* instruction) {
1961 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1962 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001963 __ Add(OutputRegister(instruction),
1964 InputRegisterAt(instruction, 0),
1965 Operand(InputOperandAt(instruction, 1)));
1966}
1967
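// A minimal sketch (not used by the generated code) of the strength
// reduction behind HIntermediateAddress: "array + data_offset" is computed
// once and reused, so each element access only needs the scaled index. The
// names below are illustrative only.
static uintptr_t ArrayElementAddressSketch(uintptr_t array,
                                           uint32_t data_offset,
                                           uint32_t index,
                                           uint32_t component_size_shift) {
  uintptr_t intermediate = array + data_offset;  // The HIntermediateAddress value.
  return intermediate + (static_cast<uintptr_t>(index) << component_size_shift);
}
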
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001968void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001969 LocationSummary* locations =
1970 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001971 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
1972 if (instr->GetOpKind() == HInstruction::kSub &&
1973 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00001974 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001975 // Don't allocate a register for the Mneg instruction.
1976 } else {
1977 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
1978 Location::RequiresRegister());
1979 }
1980 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
1981 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00001982 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1983}
1984
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001985void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001986 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001987 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
1988 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00001989
1990 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
1991 // This fixup should be carried out for all multiply-accumulate instructions:
1992 // madd, msub, smaddl, smsubl, umaddl and umsubl.
1993 if (instr->GetType() == Primitive::kPrimLong &&
1994 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
1995 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01001996 vixl::aarch64::Instruction* prev =
1997 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00001998 if (prev->IsLoadOrStore()) {
1999 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002000 vixl::aarch64::CodeBufferCheckScope scope(masm,
2001 kInstructionSize,
2002 vixl::aarch64::CodeBufferCheckScope::kCheck,
2003 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002004 __ nop();
2005 }
2006 }
2007
2008 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002009 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002010 __ Madd(res, mul_left, mul_right, accumulator);
2011 } else {
2012 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002013 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002014 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002015 __ Mneg(res, mul_left, mul_right);
2016 } else {
2017 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2018 __ Msub(res, mul_left, mul_right, accumulator);
2019 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002020 }
2021}
2022
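// A minimal sketch (not used by the generated code) of the accumulate forms
// selected above: Madd computes acc + (l * r), Msub computes acc - (l * r)
// and Mneg computes -(l * r). Unsigned arithmetic is used below because the
// instructions operate on two's-complement bit patterns, so the resulting
// bits match the signed Java semantics.
static uint64_t MaddSketch(uint64_t acc, uint64_t l, uint64_t r) { return acc + l * r; }
static uint64_t MsubSketch(uint64_t acc, uint64_t l, uint64_t r) { return acc - l * r; }
static uint64_t MnegSketch(uint64_t l, uint64_t r) { return 0u - (l * r); }
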
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002023void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002024 bool object_array_get_with_read_barrier =
2025 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002026 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002027 new (GetGraph()->GetArena()) LocationSummary(instruction,
2028 object_array_get_with_read_barrier ?
2029 LocationSummary::kCallOnSlowPath :
2030 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002031 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002032 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01002033 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002034 locations->SetInAt(0, Location::RequiresRegister());
2035 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002036 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2037 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2038 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002039 // The output overlaps in the case of an object array get with
2040 // read barriers enabled: we do not want the move to overwrite the
2041 // array's location, as we need it to emit the read barrier.
2042 locations->SetOut(
2043 Location::RequiresRegister(),
2044 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002045 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002046}
2047
void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HIntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
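  // A constant value whose bit pattern is all zeroes (0, 0.0f, 0.0 or null)
  // needs no register: InputCPURegisterOrZeroRegAt() substitutes the zero
  // register (wzr/xzr) at code generation time, even for a floating-point
  // zero, which can be stored from the integer zero register.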
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HIntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        Register temp2 = temps.AcquireSameSizeAs(array);
        // /* HeapReference<Class> */ temp = array->klass_
        __ Ldr(temp, HeapOperand(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        GetAssembler()->MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ Ldr(temp, HeapOperand(temp, component_offset));
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ Ldr(temp2, HeapOperand(Register(value), class_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp, temp2);
        temps.Release(temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl::aarch64::Label do_put;
          __ B(eq, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->super_class_
          __ Ldr(temp, HeapOperand(temp, super_offset));
          // If heap poisoning is enabled, no need to unpoison
          // `temp`, as we are comparing against null below.
          __ Cbnz(temp, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
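  // Branch on `hs` (unsigned >=): a negative index, viewed as unsigned, is
  // greater than any possible array length, so this single unsigned
  // comparison rejects both `index < 0` and `index >= length`.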
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
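      // An unordered Fcmp result (either input NaN) makes `ne` true, so Cset
      // produces 1 for "different or unordered" and 0 for "equal". The
      // conditional negation below turns the less-than case into -1; the
      // gt/lt bias selects a condition that puts NaN on the required side.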
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

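  // The quotient must round toward zero. For a negative dividend, add
  // `abs_imm - 1` first so that the arithmetic shift rounds up; e.g. for
  // imm == 8 and dividend == -5: (-5 + 7) >> 3 == 0, as Java requires.
  // The remainder path mirrors this: `temp` holds the low `ctz_imm` bits all
  // set for a negative dividend (i.e. `abs_imm - 1`) and zero otherwise, so
  // the Add/And/Sub sequence yields a remainder carrying the dividend's sign.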
  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

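  // Replace the division by a multiplication with a precomputed "magic"
  // constant (see Hacker's Delight, chapter 10): the quotient comes from the
  // high half of `dividend * magic`, corrected below when `imm` and `magic`
  // have opposite signs, shifted right by `shift`, and finally incremented
  // when negative so that it rounds toward zero.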
  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
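    // out = dividend - quotient * imm, i.e. the remainder.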
    __ Msub(out, temp, temp_imm, dividend);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  vixl::aarch64::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

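      // A comparison against zero folds into the branch itself: Cbz/Cbnz for
      // (in)equality and Tbnz/Tbz on the sign bit for lt/ge. These forms
      // save the Cmp and leave the condition flags untouched.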
      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for some
  // cases (see TypeCheckNeedsATemporary above).
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

3079void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003080 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003081 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003082 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003083 Register obj = InputRegisterAt(instruction, 0);
3084 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003085 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003086 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003087 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3088 locations->GetTemp(0) :
3089 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003090 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3091 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3092 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3093 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003094
Scott Wakeling97c72b72016-06-24 16:19:36 +01003095 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003096 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003097
3098 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003099 // Avoid null check if we know `obj` is not null.
3100 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003101 __ Cbz(obj, &zero);
3102 }
3103
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003104 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003105 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003106
Roland Levillain44015862016-01-22 11:47:17 +00003107 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003108 case TypeCheckKind::kExactCheck: {
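      // An exact check is a single pointer comparison: Cset materializes the flags into the
      // result register (1 when the classes are equal, 0 otherwise).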
3109 __ Cmp(out, cls);
3110 __ Cset(out, eq);
3111 if (zero.IsLinked()) {
3112 __ B(&done);
3113 }
3114 break;
3115 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003116
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003117 case TypeCheckKind::kAbstractClassCheck: {
3118 // If the class is abstract, we eagerly fetch the super class of the
3119 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003120 vixl::aarch64::Label loop, success;
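      // Loop: out = out->super_class_. A null super class doubles as the `false` result (the
      // Cbz below jumps to `done` with `out` == 0); a match falls through and loads 1.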
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003121 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003122 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003123 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003124 // If `out` is null, we use it for the result, and jump to `done`.
3125 __ Cbz(out, &done);
3126 __ Cmp(out, cls);
3127 __ B(ne, &loop);
3128 __ Mov(out, 1);
3129 if (zero.IsLinked()) {
3130 __ B(&done);
3131 }
3132 break;
3133 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003134
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003135 case TypeCheckKind::kClassHierarchyCheck: {
3136 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003137 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003138 __ Bind(&loop);
3139 __ Cmp(out, cls);
3140 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003141 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003142 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003143 __ Cbnz(out, &loop);
3144 // If `out` is null, we use it for the result, and jump to `done`.
3145 __ B(&done);
3146 __ Bind(&success);
3147 __ Mov(out, 1);
3148 if (zero.IsLinked()) {
3149 __ B(&done);
3150 }
3151 break;
3152 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003153
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003154 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003155 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003156 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003157 __ Cmp(out, cls);
3158 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003159 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003160 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003161 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003162 // If `out` is null, we use it for the result, and jump to `done`.
3163 __ Cbz(out, &done);
3164 __ Ldrh(out, HeapOperand(out, primitive_offset));
3165 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3166 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003167 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003168 __ Mov(out, 1);
3169 __ B(&done);
3170 break;
3171 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003172
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003173 case TypeCheckKind::kArrayCheck: {
3174 __ Cmp(out, cls);
3175 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003176 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3177 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003178 codegen_->AddSlowPath(slow_path);
3179 __ B(ne, slow_path->GetEntryLabel());
3180 __ Mov(out, 1);
3181 if (zero.IsLinked()) {
3182 __ B(&done);
3183 }
3184 break;
3185 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003186
Calin Juravle98893e12015-10-02 21:05:03 +01003187 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003188 case TypeCheckKind::kInterfaceCheck: {
3189 // Note that we indeed only call on slow path, but we always go
3190 // into the slow path for the unresolved and interface check
3191 // cases.
3192 //
3193 // We cannot directly call the InstanceofNonTrivial runtime
3194 // entry point without resorting to a type checking slow path
3195 // here (i.e. by calling InvokeRuntime directly), as it would
 3196 // require us to assign fixed registers for the inputs of this
3197 // HInstanceOf instruction (following the runtime calling
3198 // convention), which might be cluttered by the potential first
3199 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003200 //
3201 // TODO: Introduce a new runtime entry point taking the object
3202 // to test (instead of its class) as argument, and let it deal
3203 // with the read barrier issues. This will let us refactor this
3204 // case of the `switch` code as it was previously (with a direct
3205 // call to the runtime not using a type checking slow path).
3206 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003207 DCHECK(locations->OnlyCallsOnSlowPath());
3208 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3209 /* is_fatal */ false);
3210 codegen_->AddSlowPath(slow_path);
3211 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003212 if (zero.IsLinked()) {
3213 __ B(&done);
3214 }
3215 break;
3216 }
3217 }
3218
3219 if (zero.IsLinked()) {
3220 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003221 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003222 }
3223
3224 if (done.IsLinked()) {
3225 __ Bind(&done);
3226 }
3227
3228 if (slow_path != nullptr) {
3229 __ Bind(slow_path->GetExitLabel());
3230 }
3231}
3232
3233void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3234 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3235 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3236
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003237 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3238 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003239 case TypeCheckKind::kExactCheck:
3240 case TypeCheckKind::kAbstractClassCheck:
3241 case TypeCheckKind::kClassHierarchyCheck:
3242 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003243 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3244 LocationSummary::kCallOnSlowPath :
3245 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003246 break;
3247 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003248 case TypeCheckKind::kUnresolvedCheck:
3249 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003250 call_kind = LocationSummary::kCallOnSlowPath;
3251 break;
3252 }
3253
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003254 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3255 locations->SetInAt(0, Location::RequiresRegister());
3256 locations->SetInAt(1, Location::RequiresRegister());
3257 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3258 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003259 // When read barriers are enabled, we need an additional temporary
3260 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003261 if (TypeCheckNeedsATemporary(type_check_kind)) {
3262 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003263 }
3264}
3265
3266void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003267 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003268 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003269 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003270 Register obj = InputRegisterAt(instruction, 0);
3271 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003272 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003273 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3274 locations->GetTemp(1) :
3275 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003276 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003277 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3278 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3279 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3280 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003281
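  // For the four simple check kinds the slow path is only entered to throw; when the exception
  // cannot be caught in this method, that path never returns and can be marked fatal.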
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003282 bool is_type_check_slow_path_fatal =
3283 (type_check_kind == TypeCheckKind::kExactCheck ||
3284 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3285 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3286 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3287 !instruction->CanThrowIntoCatchBlock();
3288 SlowPathCodeARM64* type_check_slow_path =
3289 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3290 is_type_check_slow_path_fatal);
3291 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003292
Scott Wakeling97c72b72016-06-24 16:19:36 +01003293 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003294 // Avoid null check if we know `obj` is not null.
3295 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003296 __ Cbz(obj, &done);
3297 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003298
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003299 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003300 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003301
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003302 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003303 case TypeCheckKind::kExactCheck:
3304 case TypeCheckKind::kArrayCheck: {
3305 __ Cmp(temp, cls);
3306 // Jump to slow path for throwing the exception or doing a
3307 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003308 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003309 break;
3310 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003311
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003312 case TypeCheckKind::kAbstractClassCheck: {
3313 // If the class is abstract, we eagerly fetch the super class of the
3314 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003315 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003316 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003317 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003318 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003319
3320 // If the class reference currently in `temp` is not null, jump
3321 // to the `compare_classes` label to compare it with the checked
3322 // class.
3323 __ Cbnz(temp, &compare_classes);
3324 // Otherwise, jump to the slow path to throw the exception.
3325 //
3326 // But before, move back the object's class into `temp` before
3327 // going into the slow path, as it has been overwritten in the
3328 // meantime.
3329 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003330 GenerateReferenceLoadTwoRegisters(
3331 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003332 __ B(type_check_slow_path->GetEntryLabel());
3333
3334 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003335 __ Cmp(temp, cls);
3336 __ B(ne, &loop);
3337 break;
3338 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003339
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003340 case TypeCheckKind::kClassHierarchyCheck: {
3341 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003342 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003343 __ Bind(&loop);
3344 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003345 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003346
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003347 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003348 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003349
3350 // If the class reference currently in `temp` is not null, jump
3351 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003352 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003353 // Otherwise, jump to the slow path to throw the exception.
3354 //
3355 // But before, move back the object's class into `temp` before
3356 // going into the slow path, as it has been overwritten in the
3357 // meantime.
3358 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003359 GenerateReferenceLoadTwoRegisters(
3360 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003361 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003362 break;
3363 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003364
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003365 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003366 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003367 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003368 __ Cmp(temp, cls);
3369 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003370
3371 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003372 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003373 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003374
3375 // If the component type is not null (i.e. the object is indeed
3376 // an array), jump to label `check_non_primitive_component_type`
3377 // to further check that this component type is not a primitive
3378 // type.
3379 __ Cbnz(temp, &check_non_primitive_component_type);
3380 // Otherwise, jump to the slow path to throw the exception.
3381 //
3382 // But before, move back the object's class into `temp` before
3383 // going into the slow path, as it has been overwritten in the
3384 // meantime.
3385 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003386 GenerateReferenceLoadTwoRegisters(
3387 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003388 __ B(type_check_slow_path->GetEntryLabel());
3389
3390 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003391 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3392 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003393 __ Cbz(temp, &done);
3394 // Same comment as above regarding `temp` and the slow path.
3395 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003396 GenerateReferenceLoadTwoRegisters(
3397 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003398 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003399 break;
3400 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003401
Calin Juravle98893e12015-10-02 21:05:03 +01003402 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003403 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003404 // We always go into the type check slow path for the unresolved
3405 // and interface check cases.
3406 //
3407 // We cannot directly call the CheckCast runtime entry point
3408 // without resorting to a type checking slow path here (i.e. by
 3409 // calling InvokeRuntime directly), as it would require us to
3410 // assign fixed registers for the inputs of this HInstanceOf
3411 // instruction (following the runtime calling convention), which
3412 // might be cluttered by the potential first read barrier
3413 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003414 //
3415 // TODO: Introduce a new runtime entry point taking the object
3416 // to test (instead of its class) as argument, and let it deal
3417 // with the read barrier issues. This will let us refactor this
3418 // case of the `switch` code as it was previously (with a direct
3419 // call to the runtime not using a type checking slow path).
3420 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003421 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003422 break;
3423 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003424 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003425
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003426 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003427}
3428
Alexandre Rames5319def2014-10-23 10:03:10 +01003429void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3430 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3431 locations->SetOut(Location::ConstantLocation(constant));
3432}
3433
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003434void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003435 // Will be generated at use site.
3436}
3437
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003438void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3439 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3440 locations->SetOut(Location::ConstantLocation(constant));
3441}
3442
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003443void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003444 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003445}
3446
Calin Juravle175dc732015-08-25 15:42:32 +01003447void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3448 // The trampoline uses the same calling convention as dex calling conventions,
3449 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3450 // the method_idx.
3451 HandleInvoke(invoke);
3452}
3453
3454void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3455 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3456}
3457
Alexandre Rames5319def2014-10-23 10:03:10 +01003458void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003459 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003460 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003461}
3462
Alexandre Rames67555f72014-11-18 10:55:16 +00003463void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3464 HandleInvoke(invoke);
3465}
3466
3467void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3468 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003469 LocationSummary* locations = invoke->GetLocations();
3470 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003471 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003472 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003473 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003474
3475 // The register ip1 is required to be used for the hidden argument in
3476 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003477 MacroAssembler* masm = GetVIXLAssembler();
3478 UseScratchRegisterScope scratch_scope(masm);
3479 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003480 scratch_scope.Exclude(ip1);
3481 __ Mov(ip1, invoke->GetDexMethodIndex());
3482
Alexandre Rames67555f72014-11-18 10:55:16 +00003483 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003484 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003485 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003486 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003487 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003488 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003489 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003490 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003491 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003492 // Instead of simply (possibly) unpoisoning `temp` here, we should
3493 // emit a read barrier for the previous class reference load.
3494 // However this is not required in practice, as this is an
3495 // intermediate/temporary reference and because the current
3496 // concurrent copying collector keeps the from-space memory
3497 // intact/accessible until the end of the marking phase (the
 3498 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003499 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003500 __ Ldr(temp,
3501 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3502 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003503 invoke->GetImtIndex(), kArm64PointerSize));
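  // When several interface methods map to the same IMT slot, the slot resolves to the conflict
  // trampoline, which uses the method index placed in ip1 above to locate the actual target.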
Alexandre Rames67555f72014-11-18 10:55:16 +00003504 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003505 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003506 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003507 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003508 // lr();
3509 __ Blr(lr);
3510 DCHECK(!codegen_->IsLeafMethod());
3511 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3512}
3513
3514void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003515 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3516 if (intrinsic.TryDispatch(invoke)) {
3517 return;
3518 }
3519
Alexandre Rames67555f72014-11-18 10:55:16 +00003520 HandleInvoke(invoke);
3521}
3522
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003523void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003524 // Explicit clinit checks triggered by static invokes must have been pruned by
3525 // art::PrepareForRegisterAllocation.
3526 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003527
Andreas Gampe878d58c2015-01-15 23:24:00 -08003528 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3529 if (intrinsic.TryDispatch(invoke)) {
3530 return;
3531 }
3532
Alexandre Rames67555f72014-11-18 10:55:16 +00003533 HandleInvoke(invoke);
3534}
3535
Andreas Gampe878d58c2015-01-15 23:24:00 -08003536static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3537 if (invoke->GetLocations()->Intrinsified()) {
3538 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3539 intrinsic.Dispatch(invoke);
3540 return true;
3541 }
3542 return false;
3543}
3544
Vladimir Markodc151b22015-10-15 18:02:30 +01003545HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3546 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3547 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003548 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003549 return desired_dispatch_info;
3550}
3551
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003552void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003553 // For better instruction scheduling we load the direct code pointer before the method pointer.
3554 bool direct_code_loaded = false;
3555 switch (invoke->GetCodePtrLocation()) {
3556 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3557 // LR = code address from literal pool with link-time patch.
3558 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3559 direct_code_loaded = true;
3560 break;
3561 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3562 // LR = invoke->GetDirectCodePtr();
3563 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3564 direct_code_loaded = true;
3565 break;
3566 default:
3567 break;
3568 }
3569
Andreas Gampe878d58c2015-01-15 23:24:00 -08003570 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003571 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3572 switch (invoke->GetMethodLoadKind()) {
3573 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3574 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003575 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003576 break;
3577 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003578 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003579 break;
3580 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3581 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003582 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003583 break;
3584 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3585 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003586 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003587 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3588 break;
3589 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3590 // Add ADRP with its PC-relative DexCache access patch.
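      // ADRP computes the 4KiB-page-aligned address of the dex cache element PC-relatively; the
      // paired LDR below supplies the remaining low bits. Both instructions are emitted with
      // placeholders that the linker patches.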
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003591 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3592 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003593 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003594 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003595 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003596 __ Bind(adrp_label);
3597 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003598 }
Vladimir Marko58155012015-08-19 12:49:41 +00003599 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003600 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003601 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003602 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003603 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003604 __ Bind(ldr_label);
3605 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003606 }
Vladimir Marko58155012015-08-19 12:49:41 +00003607 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003608 }
Vladimir Marko58155012015-08-19 12:49:41 +00003609 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003610 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003611 Register reg = XRegisterFrom(temp);
3612 Register method_reg;
3613 if (current_method.IsRegister()) {
3614 method_reg = XRegisterFrom(current_method);
3615 } else {
3616 DCHECK(invoke->GetLocations()->Intrinsified());
3617 DCHECK(!current_method.IsValid());
3618 method_reg = reg;
3619 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3620 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003621
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003622 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003623 __ Ldr(reg.X(),
3624 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07003625 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003626 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003627 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3628 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003629 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3630 break;
3631 }
3632 }
3633
3634 switch (invoke->GetCodePtrLocation()) {
3635 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3636 __ Bl(&frame_entry_label_);
3637 break;
3638 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3639 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003640 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3641 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003642 __ Bind(label);
3643 __ bl(0); // Branch and link to itself. This will be overriden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003644 break;
3645 }
3646 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3647 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3648 // LR prepared above for better instruction scheduling.
3649 DCHECK(direct_code_loaded);
3650 // lr()
3651 __ Blr(lr);
3652 break;
3653 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3654 // LR = callee_method->entry_point_from_quick_compiled_code_;
3655 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003656 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07003657 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003658 // lr()
3659 __ Blr(lr);
3660 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003661 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003662
Andreas Gampe878d58c2015-01-15 23:24:00 -08003663 DCHECK(!IsLeafMethod());
3664}
3665
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003666void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003667 // Use the calling convention instead of the location of the receiver, as
3668 // intrinsics may have put the receiver in a different register. In the intrinsics
3669 // slow path, the arguments have been moved to the right place, so here we are
3670 // guaranteed that the receiver is the first register of the calling convention.
3671 InvokeDexCallingConvention calling_convention;
3672 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003673 Register temp = XRegisterFrom(temp_in);
3674 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3675 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
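  // The vtable is embedded in the mirror::Class object, so the byte offset of the entry for
  // this call's vtable index is known statically.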
3676 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003677 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003678
3679 BlockPoolsScope block_pools(GetVIXLAssembler());
3680
3681 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003682 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003683 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003684 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003685 // Instead of simply (possibly) unpoisoning `temp` here, we should
 3686 // emit a read barrier for the previous class reference load.
 // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003687 // intermediate/temporary reference and because the current
3688 // concurrent copying collector keeps the from-space memory
3689 // intact/accessible until the end of the marking phase (the
 3690 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003691 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3692 // temp = temp->GetMethodAt(method_offset);
3693 __ Ldr(temp, MemOperand(temp, method_offset));
3694 // lr = temp->GetEntryPoint();
3695 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3696 // lr();
3697 __ Blr(lr);
3698}
3699
Scott Wakeling97c72b72016-06-24 16:19:36 +01003700vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3701 const DexFile& dex_file,
3702 uint32_t string_index,
3703 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003704 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3705}
3706
Scott Wakeling97c72b72016-06-24 16:19:36 +01003707vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3708 const DexFile& dex_file,
3709 uint32_t type_index,
3710 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003711 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3712}
3713
Scott Wakeling97c72b72016-06-24 16:19:36 +01003714vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3715 const DexFile& dex_file,
3716 uint32_t element_offset,
3717 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003718 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3719}
3720
Scott Wakeling97c72b72016-06-24 16:19:36 +01003721vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3722 const DexFile& dex_file,
3723 uint32_t offset_or_index,
3724 vixl::aarch64::Label* adrp_label,
3725 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003726 // Add a patch entry and return the label.
3727 patches->emplace_back(dex_file, offset_or_index);
3728 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003729 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003730 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3731 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3732 return label;
3733}
3734
Scott Wakeling97c72b72016-06-24 16:19:36 +01003735vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003736 const DexFile& dex_file, uint32_t string_index) {
3737 return boot_image_string_patches_.GetOrCreate(
3738 StringReference(&dex_file, string_index),
3739 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3740}
3741
Scott Wakeling97c72b72016-06-24 16:19:36 +01003742vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003743 const DexFile& dex_file, uint32_t type_index) {
3744 return boot_image_type_patches_.GetOrCreate(
3745 TypeReference(&dex_file, type_index),
3746 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3747}
3748
Scott Wakeling97c72b72016-06-24 16:19:36 +01003749vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3750 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003751 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3752 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3753 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3754}
3755
Scott Wakeling97c72b72016-06-24 16:19:36 +01003756vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3757 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003758 return DeduplicateUint64Literal(address);
3759}
3760
Vladimir Marko58155012015-08-19 12:49:41 +00003761void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3762 DCHECK(linker_patches->empty());
3763 size_t size =
3764 method_patches_.size() +
3765 call_patches_.size() +
3766 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003767 pc_relative_dex_cache_patches_.size() +
3768 boot_image_string_patches_.size() +
3769 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003770 boot_image_type_patches_.size() +
3771 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003772 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003773 linker_patches->reserve(size);
3774 for (const auto& entry : method_patches_) {
3775 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003776 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3777 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003778 target_method.dex_file,
3779 target_method.dex_method_index));
3780 }
3781 for (const auto& entry : call_patches_) {
3782 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003783 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3784 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003785 target_method.dex_file,
3786 target_method.dex_method_index));
3787 }
Scott Wakeling97c72b72016-06-24 16:19:36 +01003788 for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3789 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003790 info.target_method.dex_file,
3791 info.target_method.dex_method_index));
3792 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003793 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003794 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003795 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003796 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003797 info.offset_or_index));
3798 }
3799 for (const auto& entry : boot_image_string_patches_) {
3800 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003801 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3802 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003803 target_string.dex_file,
3804 target_string.string_index));
3805 }
3806 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003807 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003808 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003809 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003810 info.offset_or_index));
3811 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003812 for (const auto& entry : boot_image_type_patches_) {
3813 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003814 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3815 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003816 target_type.dex_file,
3817 target_type.type_index));
3818 }
3819 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003820 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003821 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003822 info.pc_insn_label->GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003823 info.offset_or_index));
3824 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003825 for (const auto& entry : boot_image_address_patches_) {
3826 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003827 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3828 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003829 }
3830}
3831
Scott Wakeling97c72b72016-06-24 16:19:36 +01003832vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003833 Uint32ToLiteralMap* map) {
3834 return map->GetOrCreate(
3835 value,
3836 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3837}
3838
Scott Wakeling97c72b72016-06-24 16:19:36 +01003839vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003840 return uint64_literals_.GetOrCreate(
3841 value,
3842 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003843}
3844
Scott Wakeling97c72b72016-06-24 16:19:36 +01003845vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003846 MethodReference target_method,
3847 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003848 return map->GetOrCreate(
3849 target_method,
3850 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003851}
3852
Scott Wakeling97c72b72016-06-24 16:19:36 +01003853vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003854 MethodReference target_method) {
3855 return DeduplicateMethodLiteral(target_method, &method_patches_);
3856}
3857
Scott Wakeling97c72b72016-06-24 16:19:36 +01003858vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003859 MethodReference target_method) {
3860 return DeduplicateMethodLiteral(target_method, &call_patches_);
3861}
3862
3863
Andreas Gampe878d58c2015-01-15 23:24:00 -08003864void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003865 // Explicit clinit checks triggered by static invokes must have been pruned by
3866 // art::PrepareForRegisterAllocation.
3867 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003868
Andreas Gampe878d58c2015-01-15 23:24:00 -08003869 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3870 return;
3871 }
3872
Alexandre Ramesd921d642015-04-16 15:07:16 +01003873 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003874 LocationSummary* locations = invoke->GetLocations();
3875 codegen_->GenerateStaticOrDirectCall(
3876 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003877 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003878}
3879
3880void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003881 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3882 return;
3883 }
3884
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003885 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003886 DCHECK(!codegen_->IsLeafMethod());
3887 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3888}
3889
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003890HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3891 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003892 switch (desired_class_load_kind) {
3893 case HLoadClass::LoadKind::kReferrersClass:
3894 break;
3895 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3896 DCHECK(!GetCompilerOptions().GetCompilePic());
3897 break;
3898 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3899 DCHECK(GetCompilerOptions().GetCompilePic());
3900 break;
3901 case HLoadClass::LoadKind::kBootImageAddress:
3902 break;
3903 case HLoadClass::LoadKind::kDexCacheAddress:
3904 DCHECK(Runtime::Current()->UseJitCompilation());
3905 break;
3906 case HLoadClass::LoadKind::kDexCachePcRelative:
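      // PC-relative dex cache loads are only used AOT; JIT-compiled code knows the runtime
      // address of the dex cache element and uses kDexCacheAddress above instead.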
3907 DCHECK(!Runtime::Current()->UseJitCompilation());
3908 break;
3909 case HLoadClass::LoadKind::kDexCacheViaMethod:
3910 break;
3911 }
3912 return desired_class_load_kind;
3913}
3914
Alexandre Rames67555f72014-11-18 10:55:16 +00003915void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003916 if (cls->NeedsAccessCheck()) {
3917 InvokeRuntimeCallingConvention calling_convention;
3918 CodeGenerator::CreateLoadClassLocationSummary(
3919 cls,
3920 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01003921 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003922 /* code_generator_supports_read_barrier */ true);
3923 return;
3924 }
3925
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003926 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
3927 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003928 ? LocationSummary::kCallOnSlowPath
3929 : LocationSummary::kNoCall;
3930 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003931 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003932 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003933 }
3934
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003935 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3936 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3937 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3938 locations->SetInAt(0, Location::RequiresRegister());
3939 }
3940 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00003941}
3942
3943void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003944 if (cls->NeedsAccessCheck()) {
3945 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescu22f81d32016-02-18 16:06:31 +00003946 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003947 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003948 return;
3949 }
3950
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003951 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003952 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00003953
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003954 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003955 bool generate_null_check = false;
3956 switch (cls->GetLoadKind()) {
3957 case HLoadClass::LoadKind::kReferrersClass: {
3958 DCHECK(!cls->CanCallRuntime());
3959 DCHECK(!cls->MustGenerateClinitCheck());
3960 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3961 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003962 GenerateGcRootFieldLoad(cls,
3963 out_loc,
3964 current_method,
3965 ArtMethod::DeclaringClassOffset().Int32Value(),
3966 /*fixup_label*/ nullptr,
3967 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003968 break;
3969 }
3970 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003971 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003972 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
3973 cls->GetTypeIndex()));
3974 break;
3975 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003976 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003977 // Add ADRP with its PC-relative type patch.
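      // ADRP yields the 4KiB page of the type's address; the ADD emitted below fills in the
      // low 12 bits. Both carry placeholders that are fixed up at link time.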
3978 const DexFile& dex_file = cls->GetDexFile();
3979 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003980 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003981 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003982 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003983 __ Bind(adrp_label);
3984 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003985 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003986 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003987 vixl::aarch64::Label* add_label =
3988 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003989 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003990 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003991 __ Bind(add_label);
3992 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003993 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003994 break;
3995 }
3996 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07003997 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003998 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
3999 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4000 break;
4001 }
4002 case HLoadClass::LoadKind::kDexCacheAddress: {
4003 DCHECK_NE(cls->GetAddress(), 0u);
4004 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4005 // that gives a 16KiB range. To try and reduce the number of literals if we load
 4006 // multiple types, simply split the dex cache address into a 16KiB-aligned base
4007 // loaded from a literal and the remaining offset embedded in the load.
4008 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4009 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4010 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4011 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4012 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
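      // For example, a (hypothetical) address 0x12345678 splits into base_address 0x12344000
      // and offset 0x1678, so classes whose addresses fall in the same 16KiB window share one
      // base literal.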
4013 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4014 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004015 GenerateGcRootFieldLoad(cls,
4016 out_loc,
4017 out.X(),
4018 offset,
4019 /*fixup_label*/ nullptr,
4020 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004021 generate_null_check = !cls->IsInDexCache();
4022 break;
4023 }
4024 case HLoadClass::LoadKind::kDexCachePcRelative: {
4025 // Add ADRP with its PC-relative DexCache access patch.
4026 const DexFile& dex_file = cls->GetDexFile();
4027 uint32_t element_offset = cls->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004028 vixl::aarch64::Label* adrp_label =
4029 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004030 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004031 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004032 __ Bind(adrp_label);
4033 __ adrp(out.X(), /* offset placeholder */ 0);
4034 }
4035 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004036 vixl::aarch64::Label* ldr_label =
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004037 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4038 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004039 GenerateGcRootFieldLoad(cls,
4040 out_loc,
4041 out.X(),
4042 /* offset placeholder */ 0,
4043 ldr_label,
4044 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004045 generate_null_check = !cls->IsInDexCache();
4046 break;
4047 }
4048 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4049 MemberOffset resolved_types_offset =
4050 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4051 // /* GcRoot<mirror::Class>[] */ out =
4052 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4053 Register current_method = InputRegisterAt(cls, 0);
4054 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4055 // /* GcRoot<mirror::Class> */ out = out[type_index]
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004056 GenerateGcRootFieldLoad(cls,
4057 out_loc,
4058 out.X(),
4059 CodeGenerator::GetCacheOffset(cls->GetTypeIndex()),
4060 /* fixup_label */ nullptr,
4061 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004062 generate_null_check = !cls->IsInDexCache();
4063 break;
4064 }
4065 }
4066
4067 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4068 DCHECK(cls->CanCallRuntime());
4069 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4070 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4071 codegen_->AddSlowPath(slow_path);
4072 if (generate_null_check) {
4073 __ Cbz(out, slow_path->GetEntryLabel());
4074 }
4075 if (cls->MustGenerateClinitCheck()) {
4076 GenerateClassInitializationCheck(slow_path, out);
4077 } else {
4078 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004079 }
4080 }
4081}
4082
David Brazdilcb1c0552015-08-04 16:22:25 +01004083static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004084 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004085}
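// Note (added for clarity): this Thread-local slot, reached off `tr`, is where the
// runtime hands a pending exception to compiled code; VisitLoadException below
// reads it and VisitClearException stores wzr to clear it.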
4086
Alexandre Rames67555f72014-11-18 10:55:16 +00004087void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4088 LocationSummary* locations =
4089 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4090 locations->SetOut(Location::RequiresRegister());
4091}
4092
4093void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004094 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4095}
4096
4097void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4098 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4099}
4100
4101void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4102 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004103}
4104
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004105HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4106 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004107 switch (desired_string_load_kind) {
4108 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4109 DCHECK(!GetCompilerOptions().GetCompilePic());
4110 break;
4111 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4112 DCHECK(GetCompilerOptions().GetCompilePic());
4113 break;
4114 case HLoadString::LoadKind::kBootImageAddress:
4115 break;
4116 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004117 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004118 break;
4119 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004120 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004121 break;
4122 case HLoadString::LoadKind::kDexCacheViaMethod:
4123 break;
4124 }
4125 return desired_string_load_kind;
4126}
4127
Alexandre Rames67555f72014-11-18 10:55:16 +00004128void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004129 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
4130 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004131 : LocationSummary::kNoCall;
4132 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004133 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4134 locations->SetInAt(0, Location::RequiresRegister());
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004135 InvokeRuntimeCallingConvention calling_convention;
4136 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4137 } else {
4138 locations->SetOut(Location::RequiresRegister());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004139 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004140}
4141
4142void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004143 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004144
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004145 switch (load->GetLoadKind()) {
4146 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004147 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4148 load->GetStringIndex()));
4149 return; // No dex cache slow path.
4150 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004151 // Add ADRP with its PC-relative String patch.
4152 const DexFile& dex_file = load->GetDexFile();
4153 uint32_t string_index = load->GetStringIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004154 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004155 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004156 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004157 __ Bind(adrp_label);
4158 __ adrp(out.X(), /* offset placeholder */ 0);
4159 }
4160 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004161 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004162 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4163 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004164 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004165 __ Bind(add_label);
4166 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4167 }
4168 return; // No dex cache slow path.
4169 }
4170 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004171 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4172 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4173 return; // No dex cache slow path.
4174 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004175 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004176 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004177 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004178
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004179 // TODO: Re-add the compiler code that performs the string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004180 InvokeRuntimeCallingConvention calling_convention;
4181 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex());
4182 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4183 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004184}
4185
Alexandre Rames5319def2014-10-23 10:03:10 +01004186void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4187 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4188 locations->SetOut(Location::ConstantLocation(constant));
4189}
4190
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004191void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004192 // Will be generated at use site.
4193}
4194
Alexandre Rames67555f72014-11-18 10:55:16 +00004195void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4196 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004197 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004198 InvokeRuntimeCallingConvention calling_convention;
4199 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4200}
4201
4202void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004203 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
4204 instruction,
4205 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004206 if (instruction->IsEnter()) {
4207 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4208 } else {
4209 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4210 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004211}
4212
Alexandre Rames42d641b2014-10-27 14:00:51 +00004213void LocationsBuilderARM64::VisitMul(HMul* mul) {
4214 LocationSummary* locations =
4215 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4216 switch (mul->GetResultType()) {
4217 case Primitive::kPrimInt:
4218 case Primitive::kPrimLong:
4219 locations->SetInAt(0, Location::RequiresRegister());
4220 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004221 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004222 break;
4223
4224 case Primitive::kPrimFloat:
4225 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004226 locations->SetInAt(0, Location::RequiresFpuRegister());
4227 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004228 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004229 break;
4230
4231 default:
4232 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4233 }
4234}
4235
4236void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4237 switch (mul->GetResultType()) {
4238 case Primitive::kPrimInt:
4239 case Primitive::kPrimLong:
4240 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4241 break;
4242
4243 case Primitive::kPrimFloat:
4244 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004245 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004246 break;
4247
4248 default:
4249 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4250 }
4251}
4252
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004253void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4254 LocationSummary* locations =
4255 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4256 switch (neg->GetResultType()) {
4257 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004258 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004259 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004260 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004261 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004262
4263 case Primitive::kPrimFloat:
4264 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004265 locations->SetInAt(0, Location::RequiresFpuRegister());
4266 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004267 break;
4268
4269 default:
4270 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4271 }
4272}
4273
4274void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4275 switch (neg->GetResultType()) {
4276 case Primitive::kPrimInt:
4277 case Primitive::kPrimLong:
4278 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4279 break;
4280
4281 case Primitive::kPrimFloat:
4282 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004283 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004284 break;
4285
4286 default:
4287 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4288 }
4289}
4290
4291void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4292 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004293 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004294 InvokeRuntimeCallingConvention calling_convention;
4295 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004296 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004297 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004298 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004299}
4300
4301void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4302 LocationSummary* locations = instruction->GetLocations();
4303 InvokeRuntimeCallingConvention calling_convention;
4304 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4305 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004306 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004307 // Note: if heap poisoning is enabled, the entry point takes care
4308 // of poisoning the reference.
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004309 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Mathieu Chartiere401d142015-04-22 13:56:20 -07004310 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004311}
4312
Alexandre Rames5319def2014-10-23 10:03:10 +01004313void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4314 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004315 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004316 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004317 if (instruction->IsStringAlloc()) {
4318 locations->AddTemp(LocationFrom(kArtMethodRegister));
4319 } else {
4320 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4321 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4322 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004323 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4324}
4325
4326void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004327 // Note: if heap poisoning is enabled, the entry point takes care
4328 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004329 if (instruction->IsStringAlloc()) {
4330 // String is allocated through StringFactory. Call NewEmptyString entry point.
4331 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004332 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004333 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4334 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4335 __ Blr(lr);
4336 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4337 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004338 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00004339 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4340 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004341}
4342
4343void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4344 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004345 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004346 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004347}
4348
4349void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004350 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004351 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004352 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004353 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004354 break;
4355
4356 default:
4357 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4358 }
4359}
4360
David Brazdil66d126e2015-04-03 16:02:44 +01004361void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4362 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4363 locations->SetInAt(0, Location::RequiresRegister());
4364 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4365}
4366
4367void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004368 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004369}
4370
Alexandre Rames5319def2014-10-23 10:03:10 +01004371void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004372 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4373 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01004374}
4375
Calin Juravle2ae48182016-03-16 14:05:09 +00004376void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4377 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004378 return;
4379 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004380
Alexandre Ramesd921d642015-04-16 15:07:16 +01004381 BlockPoolsScope block_pools(GetVIXLAssembler());
4382 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004383 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004384 RecordPcInfo(instruction, instruction->GetDexPc());
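// If `obj` is null, the wzr load above faults; ART's SIGSEGV handler then uses the
// PC info recorded here to raise a NullPointerException at the right dex pc.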
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004385}
4386
Calin Juravle2ae48182016-03-16 14:05:09 +00004387void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004388 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004389 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004390
4391 LocationSummary* locations = instruction->GetLocations();
4392 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004393
4394 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004395}
4396
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004397void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004398 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004399}
4400
Alexandre Rames67555f72014-11-18 10:55:16 +00004401void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4402 HandleBinaryOp(instruction);
4403}
4404
4405void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4406 HandleBinaryOp(instruction);
4407}
4408
Alexandre Rames3e69f162014-12-10 10:36:50 +00004409void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4410 LOG(FATAL) << "Unreachable";
4411}
4412
4413void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4414 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4415}
4416
Alexandre Rames5319def2014-10-23 10:03:10 +01004417void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4418 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4419 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4420 if (location.IsStackSlot()) {
4421 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4422 } else if (location.IsDoubleStackSlot()) {
4423 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4424 }
4425 locations->SetOut(location);
4426}
4427
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004428void InstructionCodeGeneratorARM64::VisitParameterValue(
4429 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004430 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004431}
4432
4433void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4434 LocationSummary* locations =
4435 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004436 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004437}
4438
4439void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4440 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4441 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004442}
4443
4444void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4445 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004446 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004447 locations->SetInAt(i, Location::Any());
4448 }
4449 locations->SetOut(Location::Any());
4450}
4451
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004452void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004453 LOG(FATAL) << "Unreachable";
4454}
4455
Serban Constantinescu02164b32014-11-13 14:05:07 +00004456void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004457 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004458 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004459 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4460 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004461 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4462
4463 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004464 case Primitive::kPrimInt:
4465 case Primitive::kPrimLong:
4466 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004467 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004468 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4469 break;
4470
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004471 case Primitive::kPrimFloat:
4472 case Primitive::kPrimDouble: {
4473 InvokeRuntimeCallingConvention calling_convention;
4474 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4475 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4476 locations->SetOut(calling_convention.GetReturnLocation(type));
4477
4478 break;
4479 }
4480
Serban Constantinescu02164b32014-11-13 14:05:07 +00004481 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004482 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004483 }
4484}
4485
4486void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4487 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004488
Serban Constantinescu02164b32014-11-13 14:05:07 +00004489 switch (type) {
4490 case Primitive::kPrimInt:
4491 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004492 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004493 break;
4494 }
4495
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004496 case Primitive::kPrimFloat:
4497 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004498 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
4499 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004500 if (type == Primitive::kPrimFloat) {
4501 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4502 } else {
4503 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4504 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004505 break;
4506 }
4507
Serban Constantinescu02164b32014-11-13 14:05:07 +00004508 default:
4509 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004510 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004511 }
4512}
4513
Calin Juravle27df7582015-04-17 19:12:31 +01004514void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4515 memory_barrier->SetLocations(nullptr);
4516}
4517
4518void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004519 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004520}
4521
Alexandre Rames5319def2014-10-23 10:03:10 +01004522void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4523 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4524 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004525 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004526}
4527
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004528void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004529 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004530}
4531
4532void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4533 instruction->SetLocations(nullptr);
4534}
4535
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004536void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004537 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004538}
4539
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004540void LocationsBuilderARM64::VisitRor(HRor* ror) {
4541 HandleBinaryOp(ror);
4542}
4543
4544void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4545 HandleBinaryOp(ror);
4546}
4547
Serban Constantinescu02164b32014-11-13 14:05:07 +00004548void LocationsBuilderARM64::VisitShl(HShl* shl) {
4549 HandleShift(shl);
4550}
4551
4552void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4553 HandleShift(shl);
4554}
4555
4556void LocationsBuilderARM64::VisitShr(HShr* shr) {
4557 HandleShift(shr);
4558}
4559
4560void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4561 HandleShift(shr);
4562}
4563
Alexandre Rames5319def2014-10-23 10:03:10 +01004564void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004565 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004566}
4567
4568void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004569 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004570}
4571
Alexandre Rames67555f72014-11-18 10:55:16 +00004572void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004573 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004574}
4575
4576void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004577 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004578}
4579
4580void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004581 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004582}
4583
Alexandre Rames67555f72014-11-18 10:55:16 +00004584void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004585 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004586}
4587
Calin Juravlee460d1d2015-09-29 04:52:17 +01004588void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4589 HUnresolvedInstanceFieldGet* instruction) {
4590 FieldAccessCallingConventionARM64 calling_convention;
4591 codegen_->CreateUnresolvedFieldLocationSummary(
4592 instruction, instruction->GetFieldType(), calling_convention);
4593}
4594
4595void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4596 HUnresolvedInstanceFieldGet* instruction) {
4597 FieldAccessCallingConventionARM64 calling_convention;
4598 codegen_->GenerateUnresolvedFieldAccess(instruction,
4599 instruction->GetFieldType(),
4600 instruction->GetFieldIndex(),
4601 instruction->GetDexPc(),
4602 calling_convention);
4603}
4604
4605void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4606 HUnresolvedInstanceFieldSet* instruction) {
4607 FieldAccessCallingConventionARM64 calling_convention;
4608 codegen_->CreateUnresolvedFieldLocationSummary(
4609 instruction, instruction->GetFieldType(), calling_convention);
4610}
4611
4612void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4613 HUnresolvedInstanceFieldSet* instruction) {
4614 FieldAccessCallingConventionARM64 calling_convention;
4615 codegen_->GenerateUnresolvedFieldAccess(instruction,
4616 instruction->GetFieldType(),
4617 instruction->GetFieldIndex(),
4618 instruction->GetDexPc(),
4619 calling_convention);
4620}
4621
4622void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4623 HUnresolvedStaticFieldGet* instruction) {
4624 FieldAccessCallingConventionARM64 calling_convention;
4625 codegen_->CreateUnresolvedFieldLocationSummary(
4626 instruction, instruction->GetFieldType(), calling_convention);
4627}
4628
4629void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4630 HUnresolvedStaticFieldGet* instruction) {
4631 FieldAccessCallingConventionARM64 calling_convention;
4632 codegen_->GenerateUnresolvedFieldAccess(instruction,
4633 instruction->GetFieldType(),
4634 instruction->GetFieldIndex(),
4635 instruction->GetDexPc(),
4636 calling_convention);
4637}
4638
4639void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4640 HUnresolvedStaticFieldSet* instruction) {
4641 FieldAccessCallingConventionARM64 calling_convention;
4642 codegen_->CreateUnresolvedFieldLocationSummary(
4643 instruction, instruction->GetFieldType(), calling_convention);
4644}
4645
4646void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4647 HUnresolvedStaticFieldSet* instruction) {
4648 FieldAccessCallingConventionARM64 calling_convention;
4649 codegen_->GenerateUnresolvedFieldAccess(instruction,
4650 instruction->GetFieldType(),
4651 instruction->GetFieldIndex(),
4652 instruction->GetDexPc(),
4653 calling_convention);
4654}
4655
Alexandre Rames5319def2014-10-23 10:03:10 +01004656void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01004657 LocationSummary* locations =
4658 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01004659 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexandre Rames5319def2014-10-23 10:03:10 +01004660}
4661
4662void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004663 HBasicBlock* block = instruction->GetBlock();
4664 if (block->GetLoopInformation() != nullptr) {
4665 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4666 // The back edge will generate the suspend check.
4667 return;
4668 }
4669 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4670 // The goto will generate the suspend check.
4671 return;
4672 }
4673 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004674}
4675
Alexandre Rames67555f72014-11-18 10:55:16 +00004676void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4677 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004678 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004679 InvokeRuntimeCallingConvention calling_convention;
4680 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4681}
4682
4683void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004684 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004685 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004686}
4687
4688void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4689 LocationSummary* locations =
4690 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4691 Primitive::Type input_type = conversion->GetInputType();
4692 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004693 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004694 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4695 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4696 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4697 }
4698
Alexandre Rames542361f2015-01-29 16:57:31 +00004699 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004700 locations->SetInAt(0, Location::RequiresFpuRegister());
4701 } else {
4702 locations->SetInAt(0, Location::RequiresRegister());
4703 }
4704
Alexandre Rames542361f2015-01-29 16:57:31 +00004705 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004706 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4707 } else {
4708 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4709 }
4710}
4711
4712void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4713 Primitive::Type result_type = conversion->GetResultType();
4714 Primitive::Type input_type = conversion->GetInputType();
4715
4716 DCHECK_NE(input_type, result_type);
4717
Alexandre Rames542361f2015-01-29 16:57:31 +00004718 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004719 int result_size = Primitive::ComponentSize(result_type);
4720 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004721 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004722 Register output = OutputRegister(conversion);
4723 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004724 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004725 // 'int' values are used directly as W registers, discarding the top
4726 // bits, so we don't need to sign-extend and can just perform a move.
4727 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4728 // top 32 bits of the target register. We theoretically could leave those
4729 // bits unchanged, but we would have to make sure that no code uses a
4730 // 32-bit input value as a 64-bit value assuming that the top 32 bits are
4731 // zero.
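// For example, converting the long 0x123456789 to int leaves 0x23456789 in the
// W output; on AArch64, writing the W view also zeroes bits 63:32 of the X view.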
4732 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004733 } else if (result_type == Primitive::kPrimChar ||
4734 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4735 __ Ubfx(output,
4736 output.IsX() ? source.X() : source.W(),
4737 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
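// Chars are unsigned 16-bit values, so this zero-extends: e.g. an int-to-char
// conversion of 0xFFFF0041 extracts 0x0041 ('A').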
Alexandre Rames67555f72014-11-18 10:55:16 +00004738 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004739 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004740 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004741 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004742 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004743 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004744 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4745 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004746 } else if (Primitive::IsFloatingPointType(result_type) &&
4747 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004748 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4749 } else {
4750 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4751 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004752 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004753}
Alexandre Rames67555f72014-11-18 10:55:16 +00004754
Serban Constantinescu02164b32014-11-13 14:05:07 +00004755void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4756 HandleShift(ushr);
4757}
4758
4759void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4760 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004761}
4762
4763void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4764 HandleBinaryOp(instruction);
4765}
4766
4767void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4768 HandleBinaryOp(instruction);
4769}
4770
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004771void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004772 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004773 LOG(FATAL) << "Unreachable";
4774}
4775
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004776void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004777 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004778 LOG(FATAL) << "Unreachable";
4779}
4780
Mark Mendellfe57faa2015-09-18 09:26:15 -04004781// Simple implementation of packed switch - generate cascaded compare/jumps.
4782void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4783 LocationSummary* locations =
4784 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4785 locations->SetInAt(0, Location::RequiresRegister());
4786}
4787
4788void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4789 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004790 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004791 Register value_reg = InputRegisterAt(switch_instr, 0);
4792 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4793
Zheng Xu3927c8b2015-11-18 17:46:25 +08004794 // Assume a rough upper bound of 16 assembly instructions generated per HIR on average.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004795 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08004796 // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the
4797 // graph to make sure we don't emit the jump table if its targets may run out of range.
4798 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4799 // ranges and emit the tables only as required.
4800 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
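// With kInstructionSize == 4, that is 64 bytes per HIR, so the threshold works
// out to 1 MB / 64 == 16384 HIR instructions.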
Mark Mendellfe57faa2015-09-18 09:26:15 -04004801
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004802 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004803 // Current instruction id is an upper bound of the number of HIRs in the graph.
4804 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4805 // Create a series of compare/jumps.
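// As an illustration, for hypothetical case values 10..13 the code below emits
// roughly the following (scratch register name illustrative):
//   subs w16, w_value, #10
//   b.eq case_10
//   subs w16, w16, #2
//   b.lo case_11
//   b.eq case_12
//   cmp  w16, #1
//   b.eq case_13
//   b    default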
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004806 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4807 Register temp = temps.AcquireW();
4808 __ Subs(temp, value_reg, Operand(lower_bound));
4809
Zheng Xu3927c8b2015-11-18 17:46:25 +08004810 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004811 // Jump to successors[0] if value == lower_bound.
4812 __ B(eq, codegen_->GetLabelOf(successors[0]));
4813 int32_t last_index = 0;
4814 for (; num_entries - last_index > 2; last_index += 2) {
4815 __ Subs(temp, temp, Operand(2));
4816 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4817 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4818 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4819 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4820 }
4821 if (num_entries - last_index == 2) {
4822 // Handle the last remaining case value.
4823 __ Cmp(temp, Operand(1));
4824 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004825 }
4826
4827 // And the default for any other value.
4828 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4829 __ B(codegen_->GetLabelOf(default_block));
4830 }
4831 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004832 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004833
4834 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4835
4836 // The instructions below should use at most one blocked register. Since there are two
4837 // blocked registers, we are free to acquire one of them.
4838 Register temp_w = temps.AcquireW();
4839 Register index;
4840 // Remove the bias.
4841 if (lower_bound != 0) {
4842 index = temp_w;
4843 __ Sub(index, value_reg, Operand(lower_bound));
4844 } else {
4845 index = value_reg;
4846 }
4847
4848 // Jump to the default block if the index is out of range.
4849 __ Cmp(index, Operand(num_entries));
4850 __ B(hs, codegen_->GetLabelOf(default_block));
4851
4852 // The current VIXL implementation does not require any blocked registers to encode the
4853 // immediate value for Adr, so we are free to use both VIXL blocked registers to reduce
4854 // register pressure.
4855 Register table_base = temps.AcquireX();
4856 // Load jump offset from the table.
4857 __ Adr(table_base, jump_table->GetTableStartLabel());
4858 Register jump_offset = temp_w;
4859 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4860
4861 // Jump to the target block by branching to table_base (PC-relative) + offset.
4862 Register target_address = table_base;
4863 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4864 __ Br(target_address);
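// Each 32-bit table entry holds the signed byte offset from the table start to
// its case's first instruction, hence the SXTW extension in the add above.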
Mark Mendellfe57faa2015-09-18 09:26:15 -04004865 }
4866}
4867
Roland Levillain44015862016-01-22 11:47:17 +00004868void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4869 Location out,
4870 uint32_t offset,
4871 Location maybe_temp) {
4872 Primitive::Type type = Primitive::kPrimNot;
4873 Register out_reg = RegisterFrom(out, type);
4874 if (kEmitCompilerReadBarrier) {
4875 Register temp_reg = RegisterFrom(maybe_temp, type);
4876 if (kUseBakerReadBarrier) {
4877 // Load with fast path based Baker's read barrier.
4878 // /* HeapReference<Object> */ out = *(out + offset)
4879 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4880 out,
4881 out_reg,
4882 offset,
4883 temp_reg,
4884 /* needs_null_check */ false,
4885 /* use_load_acquire */ false);
4886 } else {
4887 // Load with slow path based read barrier.
4888 // Save the value of `out` into `maybe_temp` before overwriting it
4889 // in the following move operation, as we will need it for the
4890 // read barrier below.
4891 __ Mov(temp_reg, out_reg);
4892 // /* HeapReference<Object> */ out = *(out + offset)
4893 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4894 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4895 }
4896 } else {
4897 // Plain load with no read barrier.
4898 // /* HeapReference<Object> */ out = *(out + offset)
4899 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4900 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4901 }
4902}
4903
4904void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
4905 Location out,
4906 Location obj,
4907 uint32_t offset,
4908 Location maybe_temp) {
4909 Primitive::Type type = Primitive::kPrimNot;
4910 Register out_reg = RegisterFrom(out, type);
4911 Register obj_reg = RegisterFrom(obj, type);
4912 if (kEmitCompilerReadBarrier) {
4913 if (kUseBakerReadBarrier) {
4914 // Load with fast path based Baker's read barrier.
4915 Register temp_reg = RegisterFrom(maybe_temp, type);
4916 // /* HeapReference<Object> */ out = *(obj + offset)
4917 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4918 out,
4919 obj_reg,
4920 offset,
4921 temp_reg,
4922 /* needs_null_check */ false,
4923 /* use_load_acquire */ false);
4924 } else {
4925 // Load with slow path based read barrier.
4926 // /* HeapReference<Object> */ out = *(obj + offset)
4927 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4928 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
4929 }
4930 } else {
4931 // Plain load with no read barrier.
4932 // /* HeapReference<Object> */ out = *(obj + offset)
4933 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4934 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4935 }
4936}
4937
4938void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
4939 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004940 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004941 uint32_t offset,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004942 vixl::aarch64::Label* fixup_label,
4943 bool requires_read_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004944 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004945 if (requires_read_barrier) {
4946 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00004947 if (kUseBakerReadBarrier) {
4948 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
4949 // Baker's read barriers are used:
4950 //
4951 // root = obj.field;
4952 // if (Thread::Current()->GetIsGcMarking()) {
4953 // root = ReadBarrier::Mark(root)
4954 // }
4955
4956 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004957 if (fixup_label == nullptr) {
4958 __ Ldr(root_reg, MemOperand(obj, offset));
4959 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004960 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004961 __ Bind(fixup_label);
4962 __ ldr(root_reg, MemOperand(obj, offset));
4963 }
Roland Levillain44015862016-01-22 11:47:17 +00004964 static_assert(
4965 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
4966 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
4967 "have different sizes.");
4968 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
4969 "art::mirror::CompressedReference<mirror::Object> and int32_t "
4970 "have different sizes.");
4971
Vladimir Marko953437b2016-08-24 08:30:46 +00004972 // Slow path marking the GC root `root`.
Roland Levillain44015862016-01-22 11:47:17 +00004973 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01004974 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
Roland Levillain44015862016-01-22 11:47:17 +00004975 codegen_->AddSlowPath(slow_path);
4976
4977 MacroAssembler* masm = GetVIXLAssembler();
4978 UseScratchRegisterScope temps(masm);
4979 Register temp = temps.AcquireW();
4980 // temp = Thread::Current()->GetIsGcMarking()
Andreas Gampe542451c2016-07-26 09:02:02 -07004981 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00004982 __ Cbnz(temp, slow_path->GetEntryLabel());
4983 __ Bind(slow_path->GetExitLabel());
4984 } else {
4985 // GC root loaded through a slow path for read barriers other
4986 // than Baker's.
4987 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004988 if (fixup_label == nullptr) {
4989 __ Add(root_reg.X(), obj.X(), offset);
4990 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004991 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004992 __ Bind(fixup_label);
4993 __ add(root_reg.X(), obj.X(), offset);
4994 }
Roland Levillain44015862016-01-22 11:47:17 +00004995 // /* mirror::Object* */ root = root->Read()
4996 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
4997 }
4998 } else {
4999 // Plain GC root load with no read barrier.
5000 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005001 if (fixup_label == nullptr) {
5002 __ Ldr(root_reg, MemOperand(obj, offset));
5003 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005004 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005005 __ Bind(fixup_label);
5006 __ ldr(root_reg, MemOperand(obj, offset));
5007 }
Roland Levillain44015862016-01-22 11:47:17 +00005008 // Note that GC roots are not affected by heap poisoning, thus we
5009 // do not have to unpoison `root_reg` here.
5010 }
5011}
5012
5013void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5014 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005015 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005016 uint32_t offset,
5017 Register temp,
5018 bool needs_null_check,
5019 bool use_load_acquire) {
5020 DCHECK(kEmitCompilerReadBarrier);
5021 DCHECK(kUseBakerReadBarrier);
5022
5023 // /* HeapReference<Object> */ ref = *(obj + offset)
5024 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005025 size_t no_scale_factor = 0U;
5026 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5027 ref,
5028 obj,
5029 offset,
5030 no_index,
5031 no_scale_factor,
5032 temp,
5033 needs_null_check,
5034 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005035}
5036
5037void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5038 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005039 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005040 uint32_t data_offset,
5041 Location index,
5042 Register temp,
5043 bool needs_null_check) {
5044 DCHECK(kEmitCompilerReadBarrier);
5045 DCHECK(kUseBakerReadBarrier);
5046
5047 // Array cells are never volatile variables, therefore array loads
5048 // never use Load-Acquire instructions on ARM64.
5049 const bool use_load_acquire = false;
5050
Roland Levillainbfea3352016-06-23 13:48:47 +01005051 static_assert(
5052 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5053 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005054 // /* HeapReference<Object> */ ref =
5055 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005056 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5057 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5058 ref,
5059 obj,
5060 data_offset,
5061 index,
5062 scale_factor,
5063 temp,
5064 needs_null_check,
5065 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005066}
5067
5068void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5069 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005070 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005071 uint32_t offset,
5072 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005073 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005074 Register temp,
5075 bool needs_null_check,
5076 bool use_load_acquire) {
5077 DCHECK(kEmitCompilerReadBarrier);
5078 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005079 // If we are emitting an array load, we should not be using a
5080 // Load Acquire instruction. In other words:
5081 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5082 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005083
5084 MacroAssembler* masm = GetVIXLAssembler();
5085 UseScratchRegisterScope temps(masm);
5086
5087 // In slow path based read barriers, the read barrier call is
5088 // inserted after the original load. However, in fast path based
5089 // Baker's read barriers, we need to perform the load of
5090 // mirror::Object::monitor_ *before* the original reference load.
5091 // This load-load ordering is required by the read barrier.
5092 // The fast path/slow path (for Baker's algorithm) should look like:
5093 //
5094   //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
5095 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5096 // HeapReference<Object> ref = *src; // Original reference load.
5097 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
5098 // if (is_gray) {
5099 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5100 // }
5101 //
5102 // Note: the original implementation in ReadBarrier::Barrier is
5103 // slightly more complex as it performs additional checks that we do
5104 // not do here for performance reasons.
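  //
  // Roughly, for a plain field load below (no load-acquire, register names
  // illustrative), the emitted sequence is:
  //
  //   ldr   wTemp, [xObj, #monitor_offset]   // Lock word, includes rb_state.
  //   add   xObj, xObj, xTemp, lsr #32       // Adds zero; address dependency.
  //   ldr   wRef, [xObj, #offset]            // Original reference load
  //                                          // (plus unpoisoning if enabled).
  //   tbnz  wTemp, #kReadBarrierStateShift, slow_path  // Mark `ref` if gray.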
5105
5106 Primitive::Type type = Primitive::kPrimNot;
5107 Register ref_reg = RegisterFrom(ref, type);
5108 DCHECK(obj.IsW());
5109 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5110
5111 // /* int32_t */ monitor = obj->monitor_
5112 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5113 if (needs_null_check) {
5114 MaybeRecordImplicitNullCheck(instruction);
5115 }
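  // When a null check is needed, the monitor load above doubles as the
  // implicit null check: a fault on that load is reported against
  // `instruction`.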
5116 // /* LockWord */ lock_word = LockWord(monitor)
5117 static_assert(sizeof(LockWord) == sizeof(int32_t),
5118 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005119
Vladimir Marko877a0332016-07-11 19:30:56 +01005120   // Introduce a dependency on the lock_word, including rb_state,
5121   // which shall prevent load-load reordering without using
Roland Levillain44015862016-01-22 11:47:17 +00005122   // a memory barrier (which would be more expensive).
Roland Levillain0b671c02016-08-19 12:02:34 +01005123 // `obj` is unchanged by this operation, but its value now depends
5124 // on `temp`.
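  // The shifted operand is always zero: `temp` holds the 32-bit lock word,
  // written by a load that zero-extends into temp.X(), so bits [63:32] are
  // clear.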
Vladimir Marko877a0332016-07-11 19:30:56 +01005125 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
Roland Levillain44015862016-01-22 11:47:17 +00005126
5127 // The actual reference load.
5128 if (index.IsValid()) {
Roland Levillainbfea3352016-06-23 13:48:47 +01005129 // Load types involving an "index".
5130 if (use_load_acquire) {
5131 // UnsafeGetObjectVolatile intrinsic case.
5132 // Register `index` is not an index in an object array, but an
5133 // offset to an object reference field within object `obj`.
5134 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5135 DCHECK(instruction->GetLocations()->Intrinsified());
5136 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5137 << instruction->AsInvoke()->GetIntrinsic();
5138 DCHECK_EQ(offset, 0U);
5139 DCHECK_EQ(scale_factor, 0U);
5140      DCHECK(!needs_null_check);
5141 // /* HeapReference<Object> */ ref = *(obj + index)
5142 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5143 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005144 } else {
Roland Levillainbfea3352016-06-23 13:48:47 +01005145 // ArrayGet and UnsafeGetObject intrinsics cases.
5146 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5147 if (index.IsConstant()) {
5148 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5149 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5150 } else {
Vladimir Marko877a0332016-07-11 19:30:56 +01005151 Register temp2 = temps.AcquireW();
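        // AArch64 addressing modes cannot combine an immediate offset with a
        // (scaled) register index, so fold `offset` into a scratch base first.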
Roland Levillainbfea3352016-06-23 13:48:47 +01005152 __ Add(temp2, obj, offset);
5153 Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
5154 temps.Release(temp2);
5155 }
Roland Levillain44015862016-01-22 11:47:17 +00005156 }
Roland Levillain44015862016-01-22 11:47:17 +00005157 } else {
5158 // /* HeapReference<Object> */ ref = *(obj + offset)
5159 MemOperand field = HeapOperand(obj, offset);
5160 if (use_load_acquire) {
5161 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5162 } else {
5163 Load(type, ref_reg, field);
5164 }
5165 }
5166
5167 // Object* ref = ref_addr->AsMirrorPtr()
5168 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5169
Vladimir Marko953437b2016-08-24 08:30:46 +00005170 // Slow path marking the object `ref` when it is gray.
Roland Levillain44015862016-01-22 11:47:17 +00005171 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005172 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
Roland Levillain44015862016-01-22 11:47:17 +00005173 AddSlowPath(slow_path);
5174
5175 // if (rb_state == ReadBarrier::gray_ptr_)
5176 // ref = ReadBarrier::Mark(ref);
Vladimir Marko877a0332016-07-11 19:30:56 +01005177 // Given the numeric representation, it's enough to check the low bit of the rb_state.
5178 static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
5179 static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
5180 static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
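  // The Tbnz below tests exactly that bit of the lock word still held in
  // `temp`, so only gray objects take the slow path.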
5181 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005182 __ Bind(slow_path->GetExitLabel());
5183}
5184
5185void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5186 Location out,
5187 Location ref,
5188 Location obj,
5189 uint32_t offset,
5190 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005191 DCHECK(kEmitCompilerReadBarrier);
5192
Roland Levillain44015862016-01-22 11:47:17 +00005193 // Insert a slow path based read barrier *after* the reference load.
5194 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005195 // If heap poisoning is enabled, the unpoisoning of the loaded
5196 // reference will be carried out by the runtime within the slow
5197 // path.
5198 //
5199 // Note that `ref` currently does not get unpoisoned (when heap
5200 // poisoning is enabled), which is alright as the `ref` argument is
5201 // not used by the artReadBarrierSlow entry point.
5202 //
5203 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5204 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5205 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5206 AddSlowPath(slow_path);
5207
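  // Unlike the Baker fast path above, this barrier is unconditional: always
  // branch to the slow path, which performs the runtime read barrier call and
  // comes back at the exit label.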
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005208 __ B(slow_path->GetEntryLabel());
5209 __ Bind(slow_path->GetExitLabel());
5210}
5211
Roland Levillain44015862016-01-22 11:47:17 +00005212void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5213 Location out,
5214 Location ref,
5215 Location obj,
5216 uint32_t offset,
5217 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005218 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005219 // Baker's read barriers shall be handled by the fast path
5220 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5221 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005222 // If heap poisoning is enabled, unpoisoning will be taken care of
5223 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005224 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005225 } else if (kPoisonHeapReferences) {
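    // No read barrier, but the loaded reference still needs unpoisoning when
    // heap reference poisoning is enabled.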
5226 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5227 }
5228}
5229
Roland Levillain44015862016-01-22 11:47:17 +00005230void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5231 Location out,
5232 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005233 DCHECK(kEmitCompilerReadBarrier);
5234
Roland Levillain44015862016-01-22 11:47:17 +00005235 // Insert a slow path based read barrier *after* the GC root load.
5236 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005237 // Note that GC roots are not affected by heap poisoning, so we do
5238 // not need to do anything special for this here.
5239 SlowPathCodeARM64* slow_path =
5240 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5241 AddSlowPath(slow_path);
5242
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005243 __ B(slow_path->GetEntryLabel());
5244 __ Bind(slow_path->GetExitLabel());
5245}
5246
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005247void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5248 LocationSummary* locations =
5249 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5250 locations->SetInAt(0, Location::RequiresRegister());
5251 locations->SetOut(Location::RequiresRegister());
5252}
5253
5254void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5255 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005256 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005257 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005258 instruction->GetIndex(), kArm64PointerSize).SizeValue();
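    // The vtable is embedded in the class object, so a single load at
    // `method_offset` from the class yields the ArtMethod*.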
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005259 __ Ldr(XRegisterFrom(locations->Out()),
5260 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005261 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005262 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005263 instruction->GetIndex(), kArm64PointerSize));
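    // The IMT is reached through a pointer stored in the class: first load
    // the ImTable pointer, then the ArtMethod* entry at `method_offset`.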
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005264 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5265 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005266 __ Ldr(XRegisterFrom(locations->Out()),
5267 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005268 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005269}
5270
5271
5272
Alexandre Rames67555f72014-11-18 10:55:16 +00005273#undef __
5274#undef QUICK_ENTRY_POINT
5275
Alexandre Rames5319def2014-10-23 10:03:10 +01005276} // namespace arm64
5277} // namespace art