/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
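
// [Editorial worked example, not in the original source:] at the threshold of 7 entries, the
// compare/jump sequence costs roughly 1.5 * 7 + 3 = 13.5 instructions, while the jump table
// costs 7 instructions plus 7 int32 literals (about 14 words), so 7 is roughly the break-even
// point between the two strategies.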

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
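
// [Editorial note, not in the original source:] `gt_bias` decides which way an unordered
// comparison (a NaN operand) falls. FCMP with a NaN operand sets NZCV to 0011, so for kCondLT
// with gt_bias the code above picks `cc` (C == 0, false when unordered: the NaN behaves as
// "greater"), while without gt_bias it picks `lt` (N != V, true when unordered: the NaN behaves
// as "less"). The `/* unordered */` annotations mark the alternative taken in the NaN case.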

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
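
// [Editorial sketch, not in the original source:] the helper above produces a layout like
//
//   base + spill_offset                   : live caller-save core registers, 8 bytes each,
//   base + spill_offset + core_spill_size : live caller-save FP registers, 8 bytes each,
//
// and the rebasing branch exists because LDP/STP of X/D registers only reaches about
// +/-512 bytes (a scaled 7-bit signed immediate), which a large frame plus spill area can
// exceed.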

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
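
// [Editorial sketch, not in the original source:] the dispatch code that consumes this table
// does, roughly:
//
//   Adr(table_base, &table_start_);                        // PC-relative table address.
//   Ldr(offset.W(), MemOperand(table_base, index, UXTW, 2));  // Load the int32 entry.
//   Add(target, table_base, Operand(offset, SXTW));        // Entries are table-relative.
//   Br(target);
//
// which is why each literal placed above stores `target_label - table_start_` as an int32.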

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};
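
// [Editorial example, an assumption not stated in this file:] with `obj_` in W3, the offset
// computed by GetReadBarrierMarkEntryPointsOffset selects the Thread-local entrypoint slot for
// register 3 (the pReadBarrierMarkReg03 quick entrypoint), so the marked reference comes back
// in W3 itself and both moves around the call disappear, as the "compact" comment above
// describes.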

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};
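
// [Editorial arithmetic note, not in the original source:] the "cannot overflow" comment inside
// EmitNativeCode above checks out: with at most 2^26 - 1 object-array elements, `index << 2`
// stays below 2^28, and adding the (small) data offset still fits comfortably in the 32-bit W
// register used for the address computation.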

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(root_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
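
// [Editorial example, assuming the managed ABI used by this visitor (GPR arguments in x1..x7
// with x0 holding the ArtMethod*, FP arguments in d0..d7):] a (long, float, reference)
// parameter list maps to x1, d0 (read as s0 for the float), and w2, while stack_index_ still
// advances by 2 + 1 + 1 vreg slots so that any later argument that misses the registers lands
// at the correct stack offset.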

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}
Alexandre Rames5319def2014-10-23 10:03:10 +0100945
Alexandre Rames67555f72014-11-18 10:55:16 +0000946#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100947
Zheng Xu3927c8b2015-11-18 17:46:25 +0800948void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +0100949 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800950 jump_table->EmitTable(this);
951 }
952}
953
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000954void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800955 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000956 // Ensure we emit the literal pool.
957 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +0000958
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000959 CodeGenerator::Finalize(allocator);
960}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}
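
// [Editorial example, not in the original source:] a swap such as (x1 <-> x2, d0 <-> d1) is the
// kind of dependency cycle the note above is about: each cycle is broken through one scratch
// location, and because core and FP cycles never intersect on ARM64, the single X temp and
// single D temp acquired in AllocateScratchLocationFor below always suffice.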

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}
1009
Alexandre Rames3e69f162014-12-10 10:36:50 +00001010void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001011 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001012 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001013}
1014
Alexandre Rames5319def2014-10-23 10:03:10 +01001015void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001016 MacroAssembler* masm = GetVIXLAssembler();
1017 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001018 __ Bind(&frame_entry_label_);
1019
Serban Constantinescu02164b32014-11-13 14:05:07 +00001020 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1021 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001022 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001023 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001024 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001025 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001026 __ Ldr(wzr, MemOperand(temp, 0));
1027 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001028 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001029
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001030 if (!HasEmptyFrame()) {
1031 int frame_size = GetFrameSize();
1032 // Stack layout:
1033 // sp[frame_size - 8] : lr.
1034 // ... : other preserved core registers.
1035 // ... : other preserved fp registers.
1036 // ... : reserved frame space.
1037 // sp[0] : current method.
1038 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001039 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001040 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1041 frame_size - GetCoreSpillSize());
1042 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1043 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001044 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001045}
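// Frame shape illustration (hypothetical numbers): with frame_size == 48,
// core spills {x20, lr} and fp spills {d8}, the pre-indexed Str above bumps
// sp down by 48 and lays out:
//   sp[40] : lr                  (frame_size - 8)
//   sp[32] : x20
//   sp[24] : d8
//   sp[8]  : reserved frame space
//   sp[0]  : ArtMethod* (kArtMethodRegister)
// The earlier Ldr(wzr, ...) is a probe: it reads a word below the incoming sp
// by the reserved overflow gap, so a thread out of stack faults here at a
// known pc instead of somewhere inside a callee.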
1046
1047void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001048 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001049 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001050 if (!HasEmptyFrame()) {
1051 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001052 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1053 frame_size - FrameEntrySpillSize());
1054 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1055 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001056 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001057 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001058 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001059 __ Ret();
1060 GetAssembler()->cfi().RestoreState();
1061 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001062}
1063
Scott Wakeling97c72b72016-06-24 16:19:36 +01001064CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001065 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001066 return CPURegList(CPURegister::kRegister, kXRegSize,
1067 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001068}
1069
Scott Wakeling97c72b72016-06-24 16:19:36 +01001070CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001071 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1072 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001073 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1074 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001075}
1076
Alexandre Rames5319def2014-10-23 10:03:10 +01001077void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1078 __ Bind(GetLabelOf(block));
1079}
1080
Calin Juravle175dc732015-08-25 15:42:32 +01001081void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1082 DCHECK(location.IsRegister());
1083 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1084}
1085
Calin Juravlee460d1d2015-09-29 04:52:17 +01001086void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1087 if (location.IsRegister()) {
1088 locations->AddTemp(location);
1089 } else {
1090 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1091 }
1092}
1093
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001094void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001095 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001096 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001097 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001098 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001099 if (value_can_be_null) {
1100 __ Cbz(value, &done);
1101 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001102 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001103 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001104 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001105 if (value_can_be_null) {
1106 __ Bind(&done);
1107 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001108}
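// Illustration only (schematic helper, not used by the compiler): the card
// marking performed above by the Lsr/Strb pair, in plain C++. ART biases the
// card table base so that its own low byte equals the dirty-card value, which
// is why the base address doubles as the value stored by Strb.
static inline void ExampleMarkGCCard(uint8_t* biased_card_table, uintptr_t object) {
  // Assumed 128-byte cards, mirroring gc::accounting::CardTable::kCardShift.
  constexpr size_t kExampleCardShift = 7;
  biased_card_table[object >> kExampleCardShift] =
      static_cast<uint8_t>(reinterpret_cast<uintptr_t>(biased_card_table));
}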
1109
David Brazdil58282f42016-01-14 12:45:10 +00001110void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001111 // Blocked core registers:
1112 // lr : Runtime reserved.
1113 // tr : Runtime reserved.
1114 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1115 // ip1 : VIXL core temp.
1116 // ip0 : VIXL core temp.
1117 //
1118 // Blocked fp registers:
1119 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001120 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1121 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001122 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001123 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001124 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001125
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001126 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001127 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001128 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001129 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001130
David Brazdil58282f42016-01-14 12:45:10 +00001131 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001132 // Stubs do not save callee-save floating point registers. If the graph
1133 // is debuggable, we need to deal with these registers differently. For
1134 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001135 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1136 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001137 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001138 }
1139 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001140}
1141
Alexandre Rames3e69f162014-12-10 10:36:50 +00001142size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1143 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1144 __ Str(reg, MemOperand(sp, stack_index));
1145 return kArm64WordSize;
1146}
1147
1148size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1149 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1150 __ Ldr(reg, MemOperand(sp, stack_index));
1151 return kArm64WordSize;
1152}
1153
1154size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1155 FPRegister reg = FPRegister(reg_id, kDRegSize);
1156 __ Str(reg, MemOperand(sp, stack_index));
1157 return kArm64WordSize;
1158}
1159
1160size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1161 FPRegister reg = FPRegister(reg_id, kDRegSize);
1162 __ Ldr(reg, MemOperand(sp, stack_index));
1163 return kArm64WordSize;
1164}
1165
Alexandre Rames5319def2014-10-23 10:03:10 +01001166void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001167 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001168}
1169
1170void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001171 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001172}
1173
Alexandre Rames67555f72014-11-18 10:55:16 +00001174void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001175 if (constant->IsIntConstant()) {
1176 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1177 } else if (constant->IsLongConstant()) {
1178 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1179 } else if (constant->IsNullConstant()) {
1180 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001181 } else if (constant->IsFloatConstant()) {
1182 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1183 } else {
1184 DCHECK(constant->IsDoubleConstant());
1185 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1186 }
1187}
1188
Alexandre Rames3e69f162014-12-10 10:36:50 +00001189
1190static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1191 DCHECK(constant.IsConstant());
1192 HConstant* cst = constant.GetConstant();
1193 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001194 // Null is mapped to a core W register, which we associate with kPrimInt.
1195 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001196 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1197 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1198 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1199}
1200
Calin Juravlee460d1d2015-09-29 04:52:17 +01001201void CodeGeneratorARM64::MoveLocation(Location destination,
1202 Location source,
1203 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001204 if (source.Equals(destination)) {
1205 return;
1206 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001207
1208 // A valid move can always be inferred from the destination and source
1209 // locations. When moving from and to a register, the argument type can be
1210 // used to generate 32bit instead of 64bit moves. In debug mode we also
1211 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001212 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001213
1214 if (destination.IsRegister() || destination.IsFpuRegister()) {
1215 if (unspecified_type) {
1216 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1217 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001218 (src_cst != nullptr && (src_cst->IsIntConstant()
1219 || src_cst->IsFloatConstant()
1220 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001221 // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001222 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001223 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001224 // If the source is a double stack slot or a 64bit constant, a 64bit
1225 // type is appropriate. Else the source is a register, and since the
1226 // type has not been specified, we choose a 64bit type to force a 64bit
1227 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001228 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001229 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001230 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001231 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1232 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1233 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001234 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1235 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1236 __ Ldr(dst, StackOperandFrom(source));
1237 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001238 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001239 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001240 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001241 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001242 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001243 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001244 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001245 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1246 ? Primitive::kPrimLong
1247 : Primitive::kPrimInt;
1248 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1249 }
1250 } else {
1251 DCHECK(source.IsFpuRegister());
1252 if (destination.IsRegister()) {
1253 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1254 ? Primitive::kPrimDouble
1255 : Primitive::kPrimFloat;
1256 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1257 } else {
1258 DCHECK(destination.IsFpuRegister());
1259 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001260 }
1261 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001262 } else { // The destination is not a register. It must be a stack slot.
1263 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1264 if (source.IsRegister() || source.IsFpuRegister()) {
1265 if (unspecified_type) {
1266 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001267 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001268 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001269 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001270 }
1271 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001272 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1273 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1274 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001275 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001276 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1277 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001278 UseScratchRegisterScope temps(GetVIXLAssembler());
1279 HConstant* src_cst = source.GetConstant();
1280 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001281 if (src_cst->IsZeroBitPattern()) {
1282 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001283 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001284 if (src_cst->IsIntConstant()) {
1285 temp = temps.AcquireW();
1286 } else if (src_cst->IsLongConstant()) {
1287 temp = temps.AcquireX();
1288 } else if (src_cst->IsFloatConstant()) {
1289 temp = temps.AcquireS();
1290 } else {
1291 DCHECK(src_cst->IsDoubleConstant());
1292 temp = temps.AcquireD();
1293 }
1294 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001295 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001296 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001297 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001298 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001299 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001300 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001301 // There is generally less pressure on FP registers.
1302 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001303 __ Ldr(temp, StackOperandFrom(source));
1304 __ Str(temp, StackOperandFrom(destination));
1305 }
1306 }
1307}
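// E.g. (illustrative) the final stack-to-stack branch above, for a 32-bit
// slot, comes out as a load/store pair through the VIXL FP temp:
//   ldr s31, [sp, #src_offset]
//   str s31, [sp, #dst_offset]
// An FP scratch is used because GPRs are typically under more pressure here.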
1308
1309void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001310 CPURegister dst,
1311 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001312 switch (type) {
1313 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001314 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001315 break;
1316 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001317 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001318 break;
1319 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001320 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001321 break;
1322 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001323 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001324 break;
1325 case Primitive::kPrimInt:
1326 case Primitive::kPrimNot:
1327 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001328 case Primitive::kPrimFloat:
1329 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001330 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001331 __ Ldr(dst, src);
1332 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001333 case Primitive::kPrimVoid:
1334 LOG(FATAL) << "Unreachable type " << type;
1335 }
1336}
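// Extension behavior of the loads selected above, matching Java semantics
// (byte/short are signed, char is unsigned 16-bit):
//   kPrimBoolean -> Ldrb  (zero-extend)    kPrimByte  -> Ldrsb (sign-extend)
//   kPrimChar    -> Ldrh  (zero-extend)    kPrimShort -> Ldrsh (sign-extend)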
1337
Calin Juravle77520bc2015-01-12 18:45:46 +00001338void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001339 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001340 const MemOperand& src,
1341 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001342 MacroAssembler* masm = GetVIXLAssembler();
1343 BlockPoolsScope block_pools(masm);
1344 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001345 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001346 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001347
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001348 DCHECK(!src.IsPreIndex());
1349 DCHECK(!src.IsPostIndex());
1350
1351 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001352 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001353 MemOperand base = MemOperand(temp_base);
1354 switch (type) {
1355 case Primitive::kPrimBoolean:
1356 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001357 if (needs_null_check) {
1358 MaybeRecordImplicitNullCheck(instruction);
1359 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001360 break;
1361 case Primitive::kPrimByte:
1362 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001363 if (needs_null_check) {
1364 MaybeRecordImplicitNullCheck(instruction);
1365 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001366 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1367 break;
1368 case Primitive::kPrimChar:
1369 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001370 if (needs_null_check) {
1371 MaybeRecordImplicitNullCheck(instruction);
1372 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001373 break;
1374 case Primitive::kPrimShort:
1375 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001376 if (needs_null_check) {
1377 MaybeRecordImplicitNullCheck(instruction);
1378 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001379 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1380 break;
1381 case Primitive::kPrimInt:
1382 case Primitive::kPrimNot:
1383 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001384 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001385 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001386 if (needs_null_check) {
1387 MaybeRecordImplicitNullCheck(instruction);
1388 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001389 break;
1390 case Primitive::kPrimFloat:
1391 case Primitive::kPrimDouble: {
1392 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001393 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001394
1395 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1396 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001397 if (needs_null_check) {
1398 MaybeRecordImplicitNullCheck(instruction);
1399 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001400 __ Fmov(FPRegister(dst), temp);
1401 break;
1402 }
1403 case Primitive::kPrimVoid:
1404 LOG(FATAL) << "Unreachable type " << type;
1405 }
1406}
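// Ldar and friends accept only plain base-register addressing, which is why
// the offset is folded into temp_base with an Add first. E.g. (illustrative)
// an acquire load of an int field at offset 16:
//   add x16, x0, #16
//   ldar w1, [x16]
// FP values take a detour through a core register and an Fmov above, since
// there is no FP form of Ldar.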
1407
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001408void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001409 CPURegister src,
1410 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001411 switch (type) {
1412 case Primitive::kPrimBoolean:
1413 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001414 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001415 break;
1416 case Primitive::kPrimChar:
1417 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001418 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001419 break;
1420 case Primitive::kPrimInt:
1421 case Primitive::kPrimNot:
1422 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001423 case Primitive::kPrimFloat:
1424 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001425 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001426 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001427 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001428 case Primitive::kPrimVoid:
1429 LOG(FATAL) << "Unreachable type " << type;
1430 }
1431}
1432
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001433void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1434 CPURegister src,
1435 const MemOperand& dst) {
1436 UseScratchRegisterScope temps(GetVIXLAssembler());
1437 Register temp_base = temps.AcquireX();
1438
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001439 DCHECK(!dst.IsPreIndex());
1440 DCHECK(!dst.IsPostIndex());
1441
1442 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001443 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001444 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001445 MemOperand base = MemOperand(temp_base);
1446 switch (type) {
1447 case Primitive::kPrimBoolean:
1448 case Primitive::kPrimByte:
1449 __ Stlrb(Register(src), base);
1450 break;
1451 case Primitive::kPrimChar:
1452 case Primitive::kPrimShort:
1453 __ Stlrh(Register(src), base);
1454 break;
1455 case Primitive::kPrimInt:
1456 case Primitive::kPrimNot:
1457 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001458 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001459 __ Stlr(Register(src), base);
1460 break;
1461 case Primitive::kPrimFloat:
1462 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001463 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001464 Register temp_src;
1465 if (src.IsZero()) {
1466 // The zero register is used to avoid synthesizing zero constants.
1467 temp_src = Register(src);
1468 } else {
1469 DCHECK(src.IsFPRegister());
1470 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1471 __ Fmov(temp_src, FPRegister(src));
1472 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001473
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001474 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001475 break;
1476 }
1477 case Primitive::kPrimVoid:
1478 LOG(FATAL) << "Unreachable type " << type;
1479 }
1480}
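// E.g. (illustrative) a release store of the float constant 0.0f: wzr already
// holds the right bit pattern, so no Fmov round-trip is needed:
//   add x16, x0, #offset
//   stlr wzr, [x16]
// Non-zero FP values do need the Fmov into a core temp, as Stlr, like Ldar,
// has no FP form.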
1481
Calin Juravle175dc732015-08-25 15:42:32 +01001482void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1483 HInstruction* instruction,
1484 uint32_t dex_pc,
1485 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001486 ValidateInvokeRuntime(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001487 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001488 if (EntrypointRequiresStackMap(entrypoint)) {
1489 RecordPcInfo(instruction, dex_pc, slow_path);
1490 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001491}
1492
Roland Levillaindec8f632016-07-22 17:10:06 +01001493void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1494 HInstruction* instruction,
1495 SlowPathCode* slow_path) {
1496 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001497 GenerateInvokeRuntime(entry_point_offset);
1498}
1499
1500void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001501 BlockPoolsScope block_pools(GetVIXLAssembler());
1502 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1503 __ Blr(lr);
1504}
1505
Alexandre Rames67555f72014-11-18 10:55:16 +00001506void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001507 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001508 UseScratchRegisterScope temps(GetVIXLAssembler());
1509 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001510 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1511
Serban Constantinescu02164b32014-11-13 14:05:07 +00001512 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001513 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1514 __ Add(temp, class_reg, status_offset);
1515 __ Ldar(temp, HeapOperand(temp));
1516 __ Cmp(temp, mirror::Class::kStatusInitialized);
1517 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001518 __ Bind(slow_path->GetExitLabel());
1519}
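// The acquire load of the status above pairs with the runtime's release store
// of kStatusInitialized, so statics written by <clinit> are visible before the
// fast path continues. Schematically (illustrative C++ only):
//   if (cls->status.load(std::memory_order_acquire) < kStatusInitialized)
//     slow_path();  // may run <clinit>, then resumes at the exit label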
Alexandre Rames5319def2014-10-23 10:03:10 +01001520
Roland Levillain44015862016-01-22 11:47:17 +00001521void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001522 BarrierType type = BarrierAll;
1523
1524 switch (kind) {
1525 case MemBarrierKind::kAnyAny:
1526 case MemBarrierKind::kAnyStore: {
1527 type = BarrierAll;
1528 break;
1529 }
1530 case MemBarrierKind::kLoadAny: {
1531 type = BarrierReads;
1532 break;
1533 }
1534 case MemBarrierKind::kStoreStore: {
1535 type = BarrierWrites;
1536 break;
1537 }
1538 default:
1539 LOG(FATAL) << "Unexpected memory barrier " << kind;
1540 }
1541 __ Dmb(InnerShareable, type);
1542}
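// Resulting instructions, for reference:
//   kAnyAny / kAnyStore -> dmb ish    (full barrier)
//   kLoadAny            -> dmb ishld  (load barrier)
//   kStoreStore         -> dmb ishst  (store-store barrier)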
1543
Serban Constantinescu02164b32014-11-13 14:05:07 +00001544void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1545 HBasicBlock* successor) {
1546 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001547 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1548 if (slow_path == nullptr) {
1549 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1550 instruction->SetSlowPath(slow_path);
1551 codegen_->AddSlowPath(slow_path);
1552 if (successor != nullptr) {
1553 DCHECK(successor->IsLoopHeader());
1554 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1555 }
1556 } else {
1557 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1558 }
1559
Serban Constantinescu02164b32014-11-13 14:05:07 +00001560 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1561 Register temp = temps.AcquireW();
1562
Andreas Gampe542451c2016-07-26 09:02:02 -07001563 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001564 if (successor == nullptr) {
1565 __ Cbnz(temp, slow_path->GetEntryLabel());
1566 __ Bind(slow_path->GetReturnLabel());
1567 } else {
1568 __ Cbz(temp, codegen_->GetLabelOf(successor));
1569 __ B(slow_path->GetEntryLabel());
1570 // slow_path will return to GetLabelOf(successor).
1571 }
1572}
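// Schematically (illustrative): the Ldrh above polls the thread's 16-bit flag
// word, i.e. "if (self->flags != 0) take the suspend slow path". When there is
// a successor (a loop back edge), the branch sense is inverted: zero flags
// jump straight to the successor and anything else falls into the slow path.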
1573
Alexandre Rames5319def2014-10-23 10:03:10 +01001574InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1575 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001576 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001577 assembler_(codegen->GetAssembler()),
1578 codegen_(codegen) {}
1579
1580#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001581 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001582
1583#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1584
1585enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001586 // Using a base helps identify when we hit such breakpoints.
1587 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001588#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1589 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1590#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1591};
1592
1593#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001594 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001595 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1596 } \
1597 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1598 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1599 locations->SetOut(Location::Any()); \
1600 }
1601 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1602#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1603
1604#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001605#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001606
Alexandre Rames67555f72014-11-18 10:55:16 +00001607void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001608 DCHECK_EQ(instr->InputCount(), 2U);
1609 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1610 Primitive::Type type = instr->GetResultType();
1611 switch (type) {
1612 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001613 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001614 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001615 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001616 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001617 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001618
1619 case Primitive::kPrimFloat:
1620 case Primitive::kPrimDouble:
1621 locations->SetInAt(0, Location::RequiresFpuRegister());
1622 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001623 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001624 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001625
Alexandre Rames5319def2014-10-23 10:03:10 +01001626 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001627 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001628 }
1629}
1630
Alexandre Rames09a99962015-04-15 11:47:56 +01001631void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001632 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1633
1634 bool object_field_get_with_read_barrier =
1635 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001636 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001637 new (GetGraph()->GetArena()) LocationSummary(instruction,
1638 object_field_get_with_read_barrier ?
1639 LocationSummary::kCallOnSlowPath :
1640 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001641 locations->SetInAt(0, Location::RequiresRegister());
1642 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1643 locations->SetOut(Location::RequiresFpuRegister());
1644 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001645 // The output overlaps for an object field get when read barriers
1646 // are enabled: we do not want the load to overwrite the object's
1647 // location, as we need it to emit the read barrier.
1648 locations->SetOut(
1649 Location::RequiresRegister(),
1650 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001651 }
1652}
1653
1654void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1655 const FieldInfo& field_info) {
1656 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001657 LocationSummary* locations = instruction->GetLocations();
1658 Location base_loc = locations->InAt(0);
1659 Location out = locations->Out();
1660 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001661 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001662 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001663 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001664
Roland Levillain44015862016-01-22 11:47:17 +00001665 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1666 // Object FieldGet with Baker's read barrier case.
1667 MacroAssembler* masm = GetVIXLAssembler();
1668 UseScratchRegisterScope temps(masm);
1669 // /* HeapReference<Object> */ out = *(base + offset)
1670 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1671 Register temp = temps.AcquireW();
1672 // Note that potential implicit null checks are handled in this
1673 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1674 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1675 instruction,
1676 out,
1677 base,
1678 offset,
1679 temp,
1680 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001681 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001682 } else {
1683 // General case.
1684 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001685 // Note that a potential implicit null check is handled in this
1686 // CodeGeneratorARM64::LoadAcquire call.
1687 // NB: LoadAcquire will record the pc info if needed.
1688 codegen_->LoadAcquire(
1689 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001690 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001691 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001692 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001693 }
Roland Levillain44015862016-01-22 11:47:17 +00001694 if (field_type == Primitive::kPrimNot) {
1695 // If read barriers are enabled, emit read barriers other than
1696 // Baker's using a slow path (and also unpoison the loaded
1697 // reference, if heap poisoning is enabled).
1698 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1699 }
Roland Levillain4d027112015-07-01 15:41:14 +01001700 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001701}
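// Rough intuition for the Baker path above (informal): the fast path loads the
// reference and inspects the object's GC state; only when the object is still
// "gray" (not yet scanned) does a slow path intervene for the reference. The
// non-Baker path instead funnels every object field load through a slow-path
// read barrier after a plain load.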
1702
1703void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1704 LocationSummary* locations =
1705 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1706 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001707 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1708 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1709 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001710 locations->SetInAt(1, Location::RequiresFpuRegister());
1711 } else {
1712 locations->SetInAt(1, Location::RequiresRegister());
1713 }
1714}
1715
1716void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001717 const FieldInfo& field_info,
1718 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001719 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001720 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001721
1722 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001723 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001724 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001725 Offset offset = field_info.GetFieldOffset();
1726 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001727
Roland Levillain4d027112015-07-01 15:41:14 +01001728 {
1729 // We use a block to end the scratch scope before the write barrier, thus
1730 // freeing the temporary registers so they can be used in `MarkGCCard`.
1731 UseScratchRegisterScope temps(GetVIXLAssembler());
1732
1733 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1734 DCHECK(value.IsW());
1735 Register temp = temps.AcquireW();
1736 __ Mov(temp, value.W());
1737 GetAssembler()->PoisonHeapReference(temp.W());
1738 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001739 }
Roland Levillain4d027112015-07-01 15:41:14 +01001740
1741 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001742 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1743 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001744 } else {
1745 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1746 codegen_->MaybeRecordImplicitNullCheck(instruction);
1747 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001748 }
1749
1750 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001751 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001752 }
1753}
1754
Alexandre Rames67555f72014-11-18 10:55:16 +00001755void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001756 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001757
1758 switch (type) {
1759 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001760 case Primitive::kPrimLong: {
1761 Register dst = OutputRegister(instr);
1762 Register lhs = InputRegisterAt(instr, 0);
1763 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001764 if (instr->IsAdd()) {
1765 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001766 } else if (instr->IsAnd()) {
1767 __ And(dst, lhs, rhs);
1768 } else if (instr->IsOr()) {
1769 __ Orr(dst, lhs, rhs);
1770 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001771 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001772 } else if (instr->IsRor()) {
1773 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001774 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001775 __ Ror(dst, lhs, shift);
1776 } else {
1777 // Ensure the shift distance is in a register of the same size as the result. If
1778 // we are rotating a long and the shift distance originally arrives in a W register,
1779 // we do not need to sxtw it for use as an X register, since the hardware only
1780 // uses the low bits of the distance (i.e. distance & (reg_bits - 1)).
1781 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1782 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001783 } else {
1784 DCHECK(instr->IsXor());
1785 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001786 }
1787 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001788 }
1789 case Primitive::kPrimFloat:
1790 case Primitive::kPrimDouble: {
1791 FPRegister dst = OutputFPRegister(instr);
1792 FPRegister lhs = InputFPRegisterAt(instr, 0);
1793 FPRegister rhs = InputFPRegisterAt(instr, 1);
1794 if (instr->IsAdd()) {
1795 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001796 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001797 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001798 } else {
1799 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001800 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001801 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001802 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001803 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001804 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001805 }
1806}
1807
Serban Constantinescu02164b32014-11-13 14:05:07 +00001808void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1809 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1810
1811 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1812 Primitive::Type type = instr->GetResultType();
1813 switch (type) {
1814 case Primitive::kPrimInt:
1815 case Primitive::kPrimLong: {
1816 locations->SetInAt(0, Location::RequiresRegister());
1817 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1818 locations->SetOut(Location::RequiresRegister());
1819 break;
1820 }
1821 default:
1822 LOG(FATAL) << "Unexpected shift type " << type;
1823 }
1824}
1825
1826void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1827 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1828
1829 Primitive::Type type = instr->GetType();
1830 switch (type) {
1831 case Primitive::kPrimInt:
1832 case Primitive::kPrimLong: {
1833 Register dst = OutputRegister(instr);
1834 Register lhs = InputRegisterAt(instr, 0);
1835 Operand rhs = InputOperandAt(instr, 1);
1836 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001837 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001838 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001839 if (instr->IsShl()) {
1840 __ Lsl(dst, lhs, shift_value);
1841 } else if (instr->IsShr()) {
1842 __ Asr(dst, lhs, shift_value);
1843 } else {
1844 __ Lsr(dst, lhs, shift_value);
1845 }
1846 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001847 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001848
1849 if (instr->IsShl()) {
1850 __ Lsl(dst, lhs, rhs_reg);
1851 } else if (instr->IsShr()) {
1852 __ Asr(dst, lhs, rhs_reg);
1853 } else {
1854 __ Lsr(dst, lhs, rhs_reg);
1855 }
1856 }
1857 break;
1858 }
1859 default:
1860 LOG(FATAL) << "Unexpected shift operation type " << type;
1861 }
1862}
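// Illustration only (unused sketch): the masking applied to immediate shift
// distances above, mirroring Java shift semantics where only the low 5 (int)
// or 6 (long) bits of the distance are significant.
static inline uint32_t ExampleMaskedShlInt(uint32_t lhs, uint32_t distance) {
  return lhs << (distance & 31u);  // 31 == kMaxIntShiftDistance
}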
1863
Alexandre Rames5319def2014-10-23 10:03:10 +01001864void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001865 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001866}
1867
1868void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001869 HandleBinaryOp(instruction);
1870}
1871
1872void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1873 HandleBinaryOp(instruction);
1874}
1875
1876void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1877 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001878}
1879
Artem Serov7fc63502016-02-09 17:15:29 +00001880void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001881 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1882 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1883 locations->SetInAt(0, Location::RequiresRegister());
1884 // There is no immediate variant of negated bitwise instructions in AArch64.
1885 locations->SetInAt(1, Location::RequiresRegister());
1886 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1887}
1888
Artem Serov7fc63502016-02-09 17:15:29 +00001889void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001890 Register dst = OutputRegister(instr);
1891 Register lhs = InputRegisterAt(instr, 0);
1892 Register rhs = InputRegisterAt(instr, 1);
1893
1894 switch (instr->GetOpKind()) {
1895 case HInstruction::kAnd:
1896 __ Bic(dst, lhs, rhs);
1897 break;
1898 case HInstruction::kOr:
1899 __ Orn(dst, lhs, rhs);
1900 break;
1901 case HInstruction::kXor:
1902 __ Eon(dst, lhs, rhs);
1903 break;
1904 default:
1905 LOG(FATAL) << "Unreachable";
1906 }
1907}
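// Semantics of the negated forms used above: Bic(d, l, r) is l & ~r,
// Orn(d, l, r) is l | ~r, and Eon(d, l, r) is l ^ ~r. E.g. (illustrative) the
// IR shape And(x, Not(y)) folds into a single Bic instead of Mvn plus And.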
1908
Alexandre Rames8626b742015-11-25 16:28:08 +00001909void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1910 HArm64DataProcWithShifterOp* instruction) {
1911 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1912 instruction->GetType() == Primitive::kPrimLong);
1913 LocationSummary* locations =
1914 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1915 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1916 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1917 } else {
1918 locations->SetInAt(0, Location::RequiresRegister());
1919 }
1920 locations->SetInAt(1, Location::RequiresRegister());
1921 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1922}
1923
1924void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1925 HArm64DataProcWithShifterOp* instruction) {
1926 Primitive::Type type = instruction->GetType();
1927 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1928 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1929 Register out = OutputRegister(instruction);
1930 Register left;
1931 if (kind != HInstruction::kNeg) {
1932 left = InputRegisterAt(instruction, 0);
1933 }
1934 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
1935 // shifter operand operation, the IR generating `right_reg` (input to the type
1936 // conversion) can have a different type from the current instruction's type,
1937 // so we manually indicate the type.
1938 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001939 int64_t shift_amount = instruction->GetShiftAmount() &
1940 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001941
1942 Operand right_operand(0);
1943
1944 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1945 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1946 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1947 } else {
1948 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1949 }
1950
1951 // Logical binary operations do not support extension operations in the
1952 // operand. Note that VIXL would still cope if one were passed, by
1953 // generating the extension as a separate instruction.
1954 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1955 DCHECK(!right_operand.IsExtendedRegister() ||
1956 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1957 kind != HInstruction::kNeg));
1958 switch (kind) {
1959 case HInstruction::kAdd:
1960 __ Add(out, left, right_operand);
1961 break;
1962 case HInstruction::kAnd:
1963 __ And(out, left, right_operand);
1964 break;
1965 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001966 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001967 __ Neg(out, right_operand);
1968 break;
1969 case HInstruction::kOr:
1970 __ Orr(out, left, right_operand);
1971 break;
1972 case HInstruction::kSub:
1973 __ Sub(out, left, right_operand);
1974 break;
1975 case HInstruction::kXor:
1976 __ Eor(out, left, right_operand);
1977 break;
1978 default:
1979 LOG(FATAL) << "Unexpected operation kind: " << kind;
1980 UNREACHABLE();
1981 }
1982}
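// E.g. (illustrative) Add(a, Shl(b, 3)) merged into this instruction emits
//   add w0, w1, w2, lsl #3
// folding the shift into the data-processing instruction rather than emitting
// a separate lsl.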
1983
Artem Serov328429f2016-07-06 16:23:04 +01001984void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00001985 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1986 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001987 LocationSummary* locations =
1988 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1989 locations->SetInAt(0, Location::RequiresRegister());
1990 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1991 locations->SetOut(Location::RequiresRegister());
1992}
1993
Roland Levillain4a3aa572016-08-15 13:17:06 +00001994void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
1995 HIntermediateAddress* instruction) {
1996 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1997 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001998 __ Add(OutputRegister(instruction),
1999 InputRegisterAt(instruction, 0),
2000 Operand(InputOperandAt(instruction, 1)));
2001}
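// HIntermediateAddress hoists the array-data offset into a register, so that
// e.g. (illustrative) inside a loop each element access becomes a single
//   ldr w0, [x_base, w_index, uxtw #2]
// where x_base = array + data_offset was computed once by the Add above.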
2002
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002003void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002004 LocationSummary* locations =
2005 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002006 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2007 if (instr->GetOpKind() == HInstruction::kSub &&
2008 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002009 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002010 // Don't allocate a register for the Mneg instruction.
2011 } else {
2012 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2013 Location::RequiresRegister());
2014 }
2015 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2016 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002017 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2018}
2019
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002020void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002021 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002022 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2023 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002024
2025 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2026 // This fixup should be carried out for all multiply-accumulate instructions:
2027 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2028 if (instr->GetType() == Primitive::kPrimLong &&
2029 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2030 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002031 vixl::aarch64::Instruction* prev =
2032 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002033 if (prev->IsLoadOrStore()) {
2034 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002035 vixl::aarch64::CodeBufferCheckScope scope(masm,
2036 kInstructionSize,
2037 vixl::aarch64::CodeBufferCheckScope::kCheck,
2038 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002039 __ nop();
2040 }
2041 }
2042
2043 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002044 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002045 __ Madd(res, mul_left, mul_right, accumulator);
2046 } else {
2047 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002048 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002049 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002050 __ Mneg(res, mul_left, mul_right);
2051 } else {
2052 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2053 __ Msub(res, mul_left, mul_right, accumulator);
2054 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002055 }
2056}
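// Erratum 835769 background: on affected Cortex-A53 parts, a 64-bit multiply-
// accumulate immediately following a memory access can produce a wrong result,
// hence the nop wedged between them above. The Mneg path handles the
// accumulator == 0 special case: Mneg(res, a, b) computes -(a * b), i.e. an
// Msub against the zero register.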

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HIntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}
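
// Illustrative sketch (not part of the generated code): with a non-constant
// index, the general case above forms the element address in two steps, e.g.
// for an int[] load (component size shift 2):
//   add temp, obj, #data_offset          // base of the array payload
//   ldr w_out, [temp, x_index, lsl #2]   // out = *(temp + index * 4)
// With a constant index, the whole offset folds into one immediate-offset
// load instead. Register names here are illustrative only.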

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && (value_type == Primitive::kPrimNot)) {
    // Additional temporary registers for a Baker read barrier.
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HIntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          if (!kUseBakerReadBarrier) {
            // When (non-Baker) read barriers are enabled, the type
            // checking instrumentation requires two read barriers
            // generated by CodeGeneratorARM64::GenerateReadBarrierSlow:
            //
            //   __ Mov(temp2, temp);
            //   // /* HeapReference<Class> */ temp = temp->component_type_
            //   __ Ldr(temp, HeapOperand(temp, component_offset));
            //   codegen_->GenerateReadBarrierSlow(
            //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
            //
            //   // /* HeapReference<Class> */ temp2 = value->klass_
            //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
            //   codegen_->GenerateReadBarrierSlow(
            //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
            //
            //   __ Cmp(temp, temp2);
            //
            // However, the second read barrier may trash `temp`, as it
            // is a temporary register, and as such would not be saved
            // along with live registers before calling the runtime (nor
            // restored afterwards). So in this case, we bail out and
            // delegate the work to the array set slow path.
            //
            // TODO: Extend the register allocator to support a new
            // "(locally) live temp" location so as to avoid always
            // going into the slow path when read barriers are enabled?
            //
            // There is no such problem with Baker read barriers (see below).
            __ B(slow_path->GetEntryLabel());
          } else {
            // Note that we cannot use `temps` (instance of VIXL's
            // UseScratchRegisterScope) to allocate `temp2` because
            // the Baker read barriers generated by
            // GenerateFieldLoadWithBakerReadBarrier below also use
            // that facility to allocate a temporary register, thus
            // making VIXL's scratch register pool empty.
            Location temp2_loc = locations->GetTemp(0);
            Register temp2 = WRegisterFrom(temp2_loc);

            // Note: Because it is acquired from VIXL's scratch register
            // pool, `temp` might be IP0, and thus cannot be used as the
            // `ref` argument of the GenerateFieldLoadWithBakerReadBarrier
            // calls below (see ReadBarrierMarkSlowPathARM64 for more
            // details).

            // /* HeapReference<Class> */ temp2 = array->klass_
            codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                            temp2_loc,
                                                            array,
                                                            class_offset,
                                                            temp,
                                                            /* needs_null_check */ true,
                                                            /* use_load_acquire */ false);

            // /* HeapReference<Class> */ temp2 = temp2->component_type_
            codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                            temp2_loc,
                                                            temp2,
                                                            component_offset,
                                                            temp,
                                                            /* needs_null_check */ false,
                                                            /* use_load_acquire */ false);
            // For the same reason that we request `temp2` from the
            // register allocator above, we cannot get `temp3` from
            // VIXL's scratch register pool.
            Location temp3_loc = locations->GetTemp(1);
            Register temp3 = WRegisterFrom(temp3_loc);
            // Register `temp2` is not trashed by the read barrier
            // emitted by GenerateFieldLoadWithBakerReadBarrier below,
            // as that method produces a call to a ReadBarrierMarkRegX
            // entry point, which saves all potentially live registers,
            // including temporaries such as `temp2`.
            // /* HeapReference<Class> */ temp3 = register_value->klass_
            codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                            temp3_loc,
                                                            value.W(),
                                                            class_offset,
                                                            temp,
                                                            /* needs_null_check */ false,
                                                            /* use_load_acquire */ false);
            // If heap poisoning is enabled, `temp2` and `temp3` have
            // been unpoisoned by the previous calls to
            // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
            __ Cmp(temp2, temp3);

            if (instruction->StaticTypeOfArrayIsObjectArray()) {
              vixl::aarch64::Label do_put;
              __ B(eq, &do_put);
              // We do not need to emit a read barrier for the
              // following heap reference load, as `temp2` is only used
              // in a comparison with null below, and this reference
              // is not kept afterwards.
              // /* HeapReference<Class> */ temp = temp2->super_class_
              __ Ldr(temp, HeapOperand(temp2, super_offset));
              // If heap poisoning is enabled, no need to unpoison
              // `temp`, as we are comparing against null below.
              __ Cbnz(temp, slow_path->GetEntryLabel());
              __ Bind(&do_put);
            } else {
              __ B(ne, slow_path->GetEntryLabel());
            }
          }
        } else {
          // Non-read-barrier code.

          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);
          temps.Release(temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::aarch64::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
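
// Illustrative sketch (not part of the generated code, and with made-up
// register names): for an object store that may need a type check, the fast
// path assembled above is roughly:
//   cbnz  w_value, non_zero              // a null store needs no type check
//   str   wzr, [destination]
//   b     done
// non_zero:
//   ldr   w_temp, [array, #class_offset]        // array->klass_
//   ldr   w_temp, [w_temp, #component_offset]   // ->component_type_
//   ldr   w_temp2, [value, #class_offset]       // value->klass_
//   cmp   w_temp, w_temp2
//   b.ne  slow_path      // or walk super_class_ when the array is Object[]
//   str   w_value, [destination]
// done:
//   ... card mark via MarkGCCard ...
// (This is the non-read-barrier variant; the Baker variant loads the class
// pointers through GenerateFieldLoadWithBakerReadBarrier instead.)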

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
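
// Note (sketch, consistent with the code above): branching on `hs`
// (unsigned >=) covers both failure modes with one comparison. A negative
// index reinterpreted as unsigned is larger than any valid length, so
//   cmp  w_index, w_length
//   b.hs slow_path
// catches index < 0 as well as index >= length.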

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
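
// Illustrative sketch (not part of the generated code): the Cset/Cneg pair
// materializes the three-way result without branches. After `cmp left, right`:
//   cset w_res, ne          // w_res = (left != right) ? 1 : 0
//   cneg w_res, w_res, lt   // w_res = (left < right) ? -w_res : w_res
// yielding 0 for equal, +1 for greater, -1 for less. In the floating-point
// case, ARM64FPCondition and IsGtBias() decide whether an unordered (NaN)
// comparison produces +1 or -1.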

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal) \
  M(NotEqual) \
  M(LessThan) \
  M(LessThanOrEqual) \
  M(GreaterThan) \
  M(GreaterThanOrEqual) \
  M(Below) \
  M(BelowOrEqual) \
  M(Above) \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name) \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
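
// Illustrative sketch (not part of the generated code): for `x / 8` the
// division path above rounds towards zero by biasing negative dividends
// before the arithmetic shift:
//   add  temp, x, #7
//   cmp  x, #0
//   csel out, temp, x, lt   // out = (x < 0) ? x + 7 : x
//   asr  out, out, #3
// The remainder path builds the same bias (7 for negative x, 0 otherwise)
// from the sign bits, so `((x + bias) & 7) - bias` equals x % 8 with the
// sign of x, without using a multiply.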

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
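
// Illustrative sketch (not part of the generated code): for a 32-bit `x / 7`
// the classic Hacker's Delight constants are magic = 0x92492493 and shift = 2
// (values assumed here; CalculateMagicAndShiftForDivRem computes them), so
// the sequence emitted above is roughly:
//   mov   w_temp, #0x92492493
//   smull x_temp, w_x, w_temp            // 64-bit product
//   lsr   x_temp, x_temp, #32            // keep the high half
//   add   w_temp, w_temp, w_x            // fixup: magic < 0 and imm > 0
//   asr   w_temp, w_temp, #2
//   sub   w_out, w_temp, w_temp, asr #31 // add 1 for negative quotients
// For a remainder, the quotient is folded back with msub:
//   out = x - quotient * imm.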

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}
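
// Note (sketch, consistent with the code above): AArch64 has no hardware
// remainder instruction, so for a non-constant divisor `x % y` is derived
// from the quotient with a fused multiply-subtract:
//   sdiv temp, x, y        // temp = x / y, rounding towards zero
//   msub out, temp, y, x   // out = x - temp * y
// This also yields the Java-specified result 0 for INT_MIN % -1, since sdiv
// wraps INT_MIN / -1 to INT_MIN.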

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
  vixl::aarch64::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // has already been emitted (as in case 2) and we still need an unconditional
  // jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
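
// Illustrative sketch (not part of the generated code): for integer
// comparisons against zero the code above prefers compare-and-branch and
// test-bit-and-branch forms over a separate cmp, e.g. for `if (x < 0)`:
//   tbnz w_x, #31, target   // branch if the sign bit is set
// and for `if (x == 0)`:
//   cbz  w_x, target
// saving an instruction and leaving the condition flags untouched.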

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}
3150
David Srbecky0cf44932015-12-09 14:09:59 +00003151void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3152 new (GetGraph()->GetArena()) LocationSummary(info);
3153}
3154
David Srbeckyd28f4a02016-03-14 17:14:24 +00003155void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3156 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003157}
3158
3159void CodeGeneratorARM64::GenerateNop() {
3160 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003161}
3162
Alexandre Rames5319def2014-10-23 10:03:10 +01003163void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003164 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003165}
3166
3167void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003168 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003169}
3170
3171void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003172 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003173}
3174
3175void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003176 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003177}
3178
Roland Levillain44015862016-01-22 11:47:17 +00003179static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3180 return kEmitCompilerReadBarrier &&
3181 (kUseBakerReadBarrier ||
3182 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3183 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3184 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3185}
3186
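// The helper above is shared by the HInstanceOf and HCheckCast code below:
// with Baker read barriers every covered check kind needs the extra
// register, while with other read barrier implementations only the kinds
// that walk a super class or component type chain do.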
void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

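// Illustrative sketch (assuming no read barriers; register names are
// placeholders): the simplest case below, kExactCheck, reduces to roughly
//     cbz  w0, zero    // null object => result 0 (only when needed)
//     ldr  w1, [x0]    // w1 = obj->klass_ (may also need unpoisoning)
//     cmp  w1, w2      // compare against the checked class
//     cset w1, eq      // out = (klass == cls)
// The other cases share this shape but loop over super classes or inspect
// the component type before setting the result.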
void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::aarch64::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

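// For the checked cast below, the slow path is marked fatal (non-returning)
// only for the plain class check kinds and only when the resulting
// ClassCastException cannot be caught in this method; the array, unresolved
// and interface checks always use a returning slow path, matching the
// CallKind selection above.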
void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // /* HeapReference<Class> */ temp = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ Cbnz(temp, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::aarch64::Label check_non_primitive_component_type;
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ Cbnz(temp, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbz(temp, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

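// Interface dispatch sketch (illustrative only): with ip1 carrying the
// method index as the hidden argument for the conflict trampoline, the
// emitted sequence is roughly
//     ldr w16, [x1]        // temp = receiver->klass_
//     ldr x16, [x16, #imt] // temp = klass->imt_
//     ldr x16, [x16, #off] // temp = ArtMethod* from the IMT slot
//     ldr lr,  [x16, #ep]  // lr = method's quick entry point
//     blr lr
// (offsets and registers are placeholders for the values computed below).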
void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
      MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

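// The two switches below are independent axes of a static/direct call: the
// first materializes the callee ArtMethod* (or skips that for string-init
// and recursive calls), the second decides how control reaches the code
// (branch to our own frame entry, relative call, direct pointer, or a load
// through the ArtMethod).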
void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
      uint32_t element_offset = invoke->GetDexCacheArrayOffset();
      vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(ldr_label);
        __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
      }
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::aarch64::Label* label = &relative_call_patches_.back().label;
      SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(label);
      __ bl(0);  // Branch and link to itself. This will be overridden at link time.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}

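// Virtual dispatch mirrors the interface path above, but indexes the
// class's embedded vtable directly instead of going through the IMT:
// roughly ldr(klass), ldr(vtable entry), ldr(entry point), blr. No PC info
// is recorded here; callers such as VisitInvokeVirtual do that themselves.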
void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}

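// The NewPcRelative*Patch helpers below share one protocol: call them with
// adrp_label == nullptr to create the ADRP patch first, then pass the
// returned label back in to create the dependent ADD/LDR patch. A sketch:
//     vixl::aarch64::Label* adrp_label = NewPcRelativeStringPatch(f, idx);
//     // ... bind adrp_label and emit the adrp ...
//     vixl::aarch64::Label* ldr_label =
//         NewPcRelativeStringPatch(f, idx, adrp_label);
//     // ... bind ldr_label and emit the dependent load ...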
vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    uint32_t string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    uint32_t type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file,
    uint32_t element_offset,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    vixl::aarch64::Label* adrp_label,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::aarch64::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
    const DexFile& dex_file, uint32_t string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
    const DexFile& dex_file, uint32_t type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
    uint64_t address) {
  bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
  Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
    uint64_t address) {
  return DeduplicateUint64Literal(address);
}

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      boot_image_string_patches_.size() +
      pc_relative_string_patches_.size() +
      boot_image_type_patches_.size() +
      pc_relative_type_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::aarch64::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::aarch64::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->GetLocation(),
                                                              info.offset_or_index));
  }
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
                                                       target_string.dex_file,
                                                       target_string.string_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
                                                               &info.target_dex_file,
                                                               info.pc_insn_label->GetLocation(),
                                                               info.offset_or_index));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
                                                     target_type.dex_file,
                                                     target_type.type_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
                                                             &info.target_dex_file,
                                                             info.pc_insn_label->GetLocation(),
                                                             info.offset_or_index));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
  }
}

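// The Deduplicate* helpers below share one pattern: a map keyed by value
// (or method/string/type reference) whose GetOrCreate() lazily emits a
// single pool literal the first time a key is seen, so repeated constants
// share one literal pool entry and, where relevant, one linker patch.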
vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(
    uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
    MethodReference target_method,
    MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}


void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

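// With read barriers enabled, the boot image load kinds are not implemented
// yet and are downgraded to kDexCacheViaMethod (see the TODO below); the
// second switch only holds consistency checks and never rewrites the kind.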
HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  if (kEmitCompilerReadBarrier) {
    switch (desired_class_load_kind) {
      case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      case HLoadClass::LoadKind::kBootImageAddress:
        // TODO: Implement for read barrier.
        return HLoadClass::LoadKind::kDexCacheViaMethod;
      default:
        break;
    }
  }
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_class_load_kind;
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(vixl::aarch64::x0),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

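// For kBootImageLinkTimePcRelative, the load below is a classic ADRP+ADD
// pair patched at link time, roughly
//     adrp x0, Type        // page containing the type's address
//     add  x0, x0, #:lo12:Type
// while kDexCacheAddress instead splits the GC root address into a pooled
// 16KiB-aligned base plus a scaled 12-bit offset folded into the LDR.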
4088void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004089 if (cls->NeedsAccessCheck()) {
4090 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004091 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004092 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004093 return;
4094 }
4095
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004096 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004097 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004098
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004099 bool generate_null_check = false;
4100 switch (cls->GetLoadKind()) {
4101 case HLoadClass::LoadKind::kReferrersClass: {
4102 DCHECK(!cls->CanCallRuntime());
4103 DCHECK(!cls->MustGenerateClinitCheck());
4104 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4105 Register current_method = InputRegisterAt(cls, 0);
4106 GenerateGcRootFieldLoad(
4107 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4108 break;
4109 }
4110 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4111 DCHECK(!kEmitCompilerReadBarrier);
4112 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4113 cls->GetTypeIndex()));
4114 break;
4115 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4116 DCHECK(!kEmitCompilerReadBarrier);
4117 // Add ADRP with its PC-relative type patch.
4118 const DexFile& dex_file = cls->GetDexFile();
4119 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004120 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004121 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004122 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004123 __ Bind(adrp_label);
4124 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004125 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004126 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004127 vixl::aarch64::Label* add_label =
4128 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004129 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004130 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004131 __ Bind(add_label);
4132 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004133 }
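      // A sketch of what the linker-patched pair above materializes, for a
      // hypothetical type root at 0x72a14f20:
      //   adrp out, <page of 0x72a14f20>   // out = 0x72a14000
      //   add  out, out, #0xf20            // out = 0x72a14f20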
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004134 break;
4135 }
4136 case HLoadClass::LoadKind::kBootImageAddress: {
4137 DCHECK(!kEmitCompilerReadBarrier);
4138 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4139 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4140 break;
4141 }
4142 case HLoadClass::LoadKind::kDexCacheAddress: {
4143 DCHECK_NE(cls->GetAddress(), 0u);
4144 // The LDR immediate encoding has a 12-bit offset scaled by the access size; for
4145 // 32-bit loads that gives a 16KiB range. To reduce the number of literals when we
4146 // load multiple types, split the dex cache address into a 16KiB-aligned base
4147 // loaded from a literal and the remaining offset embedded in the load.
4148 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4149 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4150 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4151 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4152 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
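      // E.g. for a hypothetical dex cache address 0x76543218 (offset_bits == 14):
      //   base_address = 0x76543218 & ~0x3FFF = 0x76540000  // 16KiB-aligned literal
      //   offset       = 0x76543218 &  0x3FFF = 0x3218      // fits the scaled 12-bit LDR offset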
4153 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4154 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4155 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4156 generate_null_check = !cls->IsInDexCache();
4157 break;
4158 }
4159 case HLoadClass::LoadKind::kDexCachePcRelative: {
4160 // Add ADRP with its PC-relative DexCache access patch.
4161 const DexFile& dex_file = cls->GetDexFile();
4162 uint32_t element_offset = cls->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004163 vixl::aarch64::Label* adrp_label =
4164 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004165 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004166 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004167 __ Bind(adrp_label);
4168 __ adrp(out.X(), /* offset placeholder */ 0);
4169 }
4170 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004171 vixl::aarch64::Label* ldr_label =
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004172 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4173 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4174 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4175 generate_null_check = !cls->IsInDexCache();
4176 break;
4177 }
4178 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4179 MemberOffset resolved_types_offset =
4180 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4181 // /* GcRoot<mirror::Class>[] */ out =
4182 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4183 Register current_method = InputRegisterAt(cls, 0);
4184 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4185 // /* GcRoot<mirror::Class> */ out = out[type_index]
4186 GenerateGcRootFieldLoad(
4187 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4188 generate_null_check = !cls->IsInDexCache();
4189 break;
4190 }
4191 }
4192
4193 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4194 DCHECK(cls->CanCallRuntime());
4195 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4196 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4197 codegen_->AddSlowPath(slow_path);
4198 if (generate_null_check) {
4199 __ Cbz(out, slow_path->GetEntryLabel());
4200 }
4201 if (cls->MustGenerateClinitCheck()) {
4202 GenerateClassInitializationCheck(slow_path, out);
4203 } else {
4204 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004205 }
4206 }
4207}
4208
David Brazdilcb1c0552015-08-04 16:22:25 +01004209static MemOperand GetExceptionTlsAddress() {
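  // `tr` is ART's reserved thread register, so the pending-exception field is
  // addressed at a fixed offset off the current Thread object.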
Andreas Gampe542451c2016-07-26 09:02:02 -07004210 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004211}
4212
Alexandre Rames67555f72014-11-18 10:55:16 +00004213void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4214 LocationSummary* locations =
4215 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4216 locations->SetOut(Location::RequiresRegister());
4217}
4218
4219void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004220 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4221}
4222
4223void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4224 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4225}
4226
4227void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4228 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004229}
4230
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004231HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4232 HLoadString::LoadKind desired_string_load_kind) {
4233 if (kEmitCompilerReadBarrier) {
4234 switch (desired_string_load_kind) {
4235 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4236 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4237 case HLoadString::LoadKind::kBootImageAddress:
4238 // TODO: Implement for read barrier.
4239 return HLoadString::LoadKind::kDexCacheViaMethod;
4240 default:
4241 break;
4242 }
4243 }
4244 switch (desired_string_load_kind) {
4245 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4246 DCHECK(!GetCompilerOptions().GetCompilePic());
4247 break;
4248 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4249 DCHECK(GetCompilerOptions().GetCompilePic());
4250 break;
4251 case HLoadString::LoadKind::kBootImageAddress:
4252 break;
4253 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004254 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004255 break;
4256 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004257 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004258 break;
4259 case HLoadString::LoadKind::kDexCacheViaMethod:
4260 break;
4261 }
4262 return desired_string_load_kind;
4263}
4264
Alexandre Rames67555f72014-11-18 10:55:16 +00004265void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004266 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004267 ? LocationSummary::kCallOnSlowPath
4268 : LocationSummary::kNoCall;
4269 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004270 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4271 locations->SetInAt(0, Location::RequiresRegister());
4272 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004273 locations->SetOut(Location::RequiresRegister());
4274}
4275
4276void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004277 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004278
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004279 switch (load->GetLoadKind()) {
4280 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4281 DCHECK(!kEmitCompilerReadBarrier);
4282 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4283 load->GetStringIndex()));
4284 return; // No dex cache slow path.
4285 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4286 DCHECK(!kEmitCompilerReadBarrier);
4287 // Add ADRP with its PC-relative String patch.
4288 const DexFile& dex_file = load->GetDexFile();
4289 uint32_t string_index = load->GetStringIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004290 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004291 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004292 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004293 __ Bind(adrp_label);
4294 __ adrp(out.X(), /* offset placeholder */ 0);
4295 }
4296 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004297 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004298 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4299 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004300 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004301 __ Bind(add_label);
4302 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4303 }
4304 return; // No dex cache slow path.
4305 }
4306 case HLoadString::LoadKind::kBootImageAddress: {
4307 DCHECK(!kEmitCompilerReadBarrier);
4308 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4309 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4310 return; // No dex cache slow path.
4311 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004312 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004313 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004314 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004315
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004316 // TODO: Re-add the compiler code that does the string dex cache lookup.
4317 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4318 codegen_->AddSlowPath(slow_path);
4319 __ B(slow_path->GetEntryLabel());
4320 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004321}
4322
Alexandre Rames5319def2014-10-23 10:03:10 +01004323void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4324 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4325 locations->SetOut(Location::ConstantLocation(constant));
4326}
4327
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004328void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004329 // Will be generated at use site.
4330}
4331
Alexandre Rames67555f72014-11-18 10:55:16 +00004332void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4333 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004334 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004335 InvokeRuntimeCallingConvention calling_convention;
4336 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4337}
4338
4339void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004340 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
4341 instruction,
4342 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004343 if (instruction->IsEnter()) {
4344 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4345 } else {
4346 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4347 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004348}
4349
Alexandre Rames42d641b2014-10-27 14:00:51 +00004350void LocationsBuilderARM64::VisitMul(HMul* mul) {
4351 LocationSummary* locations =
4352 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4353 switch (mul->GetResultType()) {
4354 case Primitive::kPrimInt:
4355 case Primitive::kPrimLong:
4356 locations->SetInAt(0, Location::RequiresRegister());
4357 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004358 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004359 break;
4360
4361 case Primitive::kPrimFloat:
4362 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004363 locations->SetInAt(0, Location::RequiresFpuRegister());
4364 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004365 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004366 break;
4367
4368 default:
4369 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4370 }
4371}
4372
4373void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4374 switch (mul->GetResultType()) {
4375 case Primitive::kPrimInt:
4376 case Primitive::kPrimLong:
4377 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4378 break;
4379
4380 case Primitive::kPrimFloat:
4381 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004382 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004383 break;
4384
4385 default:
4386 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4387 }
4388}
4389
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004390void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4391 LocationSummary* locations =
4392 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4393 switch (neg->GetResultType()) {
4394 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004395 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004396 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004397 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004398 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004399
4400 case Primitive::kPrimFloat:
4401 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004402 locations->SetInAt(0, Location::RequiresFpuRegister());
4403 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004404 break;
4405
4406 default:
4407 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4408 }
4409}
4410
4411void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4412 switch (neg->GetResultType()) {
4413 case Primitive::kPrimInt:
4414 case Primitive::kPrimLong:
4415 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4416 break;
4417
4418 case Primitive::kPrimFloat:
4419 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004420 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004421 break;
4422
4423 default:
4424 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4425 }
4426}
4427
4428void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4429 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004430 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004431 InvokeRuntimeCallingConvention calling_convention;
4432 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004433 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004434 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004435 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004436}
4437
4438void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4439 LocationSummary* locations = instruction->GetLocations();
4440 InvokeRuntimeCallingConvention calling_convention;
4441 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4442 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004443 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004444 // Note: if heap poisoning is enabled, the entry point takes care
4445 // of poisoning the reference.
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004446 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Mathieu Chartiere401d142015-04-22 13:56:20 -07004447 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004448}
4449
Alexandre Rames5319def2014-10-23 10:03:10 +01004450void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4451 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004452 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004453 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004454 if (instruction->IsStringAlloc()) {
4455 locations->AddTemp(LocationFrom(kArtMethodRegister));
4456 } else {
4457 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4458 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4459 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004460 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4461}
4462
4463void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004464 // Note: if heap poisoning is enabled, the entry point takes care
4465 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004466 if (instruction->IsStringAlloc()) {
4467 // String is allocated through StringFactory. Call NewEmptyString entry point.
4468 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004469 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
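    // The pNewEmptyString entrypoint slot holds the StringFactory method (an
    // ArtMethod*): the first load below fetches it from the current thread (tr),
    // the second loads that method's compiled-code entry point, which Blr calls.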
David Brazdil6de19382016-01-08 17:37:10 +00004470 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4471 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4472 __ Blr(lr);
4473 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4474 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004475 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00004476 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4477 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004478}
4479
4480void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4481 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004482 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004483 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004484}
4485
4486void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004487 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004488 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004489 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004490 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004491 break;
4492
4493 default:
4494 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4495 }
4496}
4497
David Brazdil66d126e2015-04-03 16:02:44 +01004498void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4499 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4500 locations->SetInAt(0, Location::RequiresRegister());
4501 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4502}
4503
4504void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004505 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004506}
4507
Alexandre Rames5319def2014-10-23 10:03:10 +01004508void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004509 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4510 ? LocationSummary::kCallOnSlowPath
4511 : LocationSummary::kNoCall;
4512 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004513 locations->SetInAt(0, Location::RequiresRegister());
4514 if (instruction->HasUses()) {
4515 locations->SetOut(Location::SameAsFirstInput());
4516 }
4517}
4518
Calin Juravle2ae48182016-03-16 14:05:09 +00004519void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4520 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004521 return;
4522 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004523
Alexandre Ramesd921d642015-04-16 15:07:16 +01004524 BlockPoolsScope block_pools(GetVIXLAssembler());
4525 Location obj = instruction->GetLocations()->InAt(0);
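  // Loading into wzr discards the value: this load exists only to fault when
  // `obj` is null, letting the runtime's fault handler raise the
  // NullPointerException at the PC recorded below.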
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004526 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004527 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004528}
4529
Calin Juravle2ae48182016-03-16 14:05:09 +00004530void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004531 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004532 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004533
4534 LocationSummary* locations = instruction->GetLocations();
4535 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004536
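  // Cbz branches to the slow path, which throws the NullPointerException,
  // whenever the reference compares equal to zero (i.e. is null).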
4537 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004538}
4539
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004540void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004541 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004542}
4543
Alexandre Rames67555f72014-11-18 10:55:16 +00004544void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4545 HandleBinaryOp(instruction);
4546}
4547
4548void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4549 HandleBinaryOp(instruction);
4550}
4551
Alexandre Rames3e69f162014-12-10 10:36:50 +00004552void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4553 LOG(FATAL) << "Unreachable";
4554}
4555
4556void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4557 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4558}
4559
Alexandre Rames5319def2014-10-23 10:03:10 +01004560void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4561 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4562 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4563 if (location.IsStackSlot()) {
4564 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4565 } else if (location.IsDoubleStackSlot()) {
4566 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4567 }
4568 locations->SetOut(location);
4569}
4570
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004571void InstructionCodeGeneratorARM64::VisitParameterValue(
4572 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004573 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004574}
4575
4576void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4577 LocationSummary* locations =
4578 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004579 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004580}
4581
4582void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4583 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4584 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004585}
4586
4587void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4588 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004589 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004590 locations->SetInAt(i, Location::Any());
4591 }
4592 locations->SetOut(Location::Any());
4593}
4594
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004595void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004596 LOG(FATAL) << "Unreachable";
4597}
4598
Serban Constantinescu02164b32014-11-13 14:05:07 +00004599void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004600 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004601 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004602 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4603 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004604 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4605
4606 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004607 case Primitive::kPrimInt:
4608 case Primitive::kPrimLong:
4609 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004610 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004611 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4612 break;
4613
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004614 case Primitive::kPrimFloat:
4615 case Primitive::kPrimDouble: {
4616 InvokeRuntimeCallingConvention calling_convention;
4617 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4618 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4619 locations->SetOut(calling_convention.GetReturnLocation(type));
4620
4621 break;
4622 }
4623
Serban Constantinescu02164b32014-11-13 14:05:07 +00004624 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004625 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004626 }
4627}
4628
4629void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4630 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004631
Serban Constantinescu02164b32014-11-13 14:05:07 +00004632 switch (type) {
4633 case Primitive::kPrimInt:
4634 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004635 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004636 break;
4637 }
4638
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004639 case Primitive::kPrimFloat:
4640 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004641 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
4642 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004643 if (type == Primitive::kPrimFloat) {
4644 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4645 } else {
4646 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4647 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004648 break;
4649 }
4650
Serban Constantinescu02164b32014-11-13 14:05:07 +00004651 default:
4652 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004653 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004654 }
4655}
4656
Calin Juravle27df7582015-04-17 19:12:31 +01004657void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4658 memory_barrier->SetLocations(nullptr);
4659}
4660
4661void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004662 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004663}
4664
Alexandre Rames5319def2014-10-23 10:03:10 +01004665void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4666 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4667 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004668 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004669}
4670
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004671void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004672 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004673}
4674
4675void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4676 instruction->SetLocations(nullptr);
4677}
4678
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004679void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004680 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004681}
4682
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004683void LocationsBuilderARM64::VisitRor(HRor* ror) {
4684 HandleBinaryOp(ror);
4685}
4686
4687void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4688 HandleBinaryOp(ror);
4689}
4690
Serban Constantinescu02164b32014-11-13 14:05:07 +00004691void LocationsBuilderARM64::VisitShl(HShl* shl) {
4692 HandleShift(shl);
4693}
4694
4695void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4696 HandleShift(shl);
4697}
4698
4699void LocationsBuilderARM64::VisitShr(HShr* shr) {
4700 HandleShift(shr);
4701}
4702
4703void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4704 HandleShift(shr);
4705}
4706
Alexandre Rames5319def2014-10-23 10:03:10 +01004707void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004708 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004709}
4710
4711void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004712 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004713}
4714
Alexandre Rames67555f72014-11-18 10:55:16 +00004715void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004716 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004717}
4718
4719void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004720 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004721}
4722
4723void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004724 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004725}
4726
Alexandre Rames67555f72014-11-18 10:55:16 +00004727void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004728 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004729}
4730
Calin Juravlee460d1d2015-09-29 04:52:17 +01004731void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4732 HUnresolvedInstanceFieldGet* instruction) {
4733 FieldAccessCallingConventionARM64 calling_convention;
4734 codegen_->CreateUnresolvedFieldLocationSummary(
4735 instruction, instruction->GetFieldType(), calling_convention);
4736}
4737
4738void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4739 HUnresolvedInstanceFieldGet* instruction) {
4740 FieldAccessCallingConventionARM64 calling_convention;
4741 codegen_->GenerateUnresolvedFieldAccess(instruction,
4742 instruction->GetFieldType(),
4743 instruction->GetFieldIndex(),
4744 instruction->GetDexPc(),
4745 calling_convention);
4746}
4747
4748void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4749 HUnresolvedInstanceFieldSet* instruction) {
4750 FieldAccessCallingConventionARM64 calling_convention;
4751 codegen_->CreateUnresolvedFieldLocationSummary(
4752 instruction, instruction->GetFieldType(), calling_convention);
4753}
4754
4755void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4756 HUnresolvedInstanceFieldSet* instruction) {
4757 FieldAccessCallingConventionARM64 calling_convention;
4758 codegen_->GenerateUnresolvedFieldAccess(instruction,
4759 instruction->GetFieldType(),
4760 instruction->GetFieldIndex(),
4761 instruction->GetDexPc(),
4762 calling_convention);
4763}
4764
4765void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4766 HUnresolvedStaticFieldGet* instruction) {
4767 FieldAccessCallingConventionARM64 calling_convention;
4768 codegen_->CreateUnresolvedFieldLocationSummary(
4769 instruction, instruction->GetFieldType(), calling_convention);
4770}
4771
4772void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4773 HUnresolvedStaticFieldGet* instruction) {
4774 FieldAccessCallingConventionARM64 calling_convention;
4775 codegen_->GenerateUnresolvedFieldAccess(instruction,
4776 instruction->GetFieldType(),
4777 instruction->GetFieldIndex(),
4778 instruction->GetDexPc(),
4779 calling_convention);
4780}
4781
4782void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4783 HUnresolvedStaticFieldSet* instruction) {
4784 FieldAccessCallingConventionARM64 calling_convention;
4785 codegen_->CreateUnresolvedFieldLocationSummary(
4786 instruction, instruction->GetFieldType(), calling_convention);
4787}
4788
4789void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4790 HUnresolvedStaticFieldSet* instruction) {
4791 FieldAccessCallingConventionARM64 calling_convention;
4792 codegen_->GenerateUnresolvedFieldAccess(instruction,
4793 instruction->GetFieldType(),
4794 instruction->GetFieldIndex(),
4795 instruction->GetDexPc(),
4796 calling_convention);
4797}
4798
Alexandre Rames5319def2014-10-23 10:03:10 +01004799void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4800 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4801}
4802
4803void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004804 HBasicBlock* block = instruction->GetBlock();
4805 if (block->GetLoopInformation() != nullptr) {
4806 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4807 // The back edge will generate the suspend check.
4808 return;
4809 }
4810 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4811 // The goto will generate the suspend check.
4812 return;
4813 }
4814 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004815}
4816
Alexandre Rames67555f72014-11-18 10:55:16 +00004817void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4818 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004819 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004820 InvokeRuntimeCallingConvention calling_convention;
4821 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4822}
4823
4824void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004825 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004826 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004827}
4828
4829void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4830 LocationSummary* locations =
4831 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4832 Primitive::Type input_type = conversion->GetInputType();
4833 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004834 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004835 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4836 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4837 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4838 }
4839
Alexandre Rames542361f2015-01-29 16:57:31 +00004840 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004841 locations->SetInAt(0, Location::RequiresFpuRegister());
4842 } else {
4843 locations->SetInAt(0, Location::RequiresRegister());
4844 }
4845
Alexandre Rames542361f2015-01-29 16:57:31 +00004846 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004847 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4848 } else {
4849 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4850 }
4851}
4852
4853void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4854 Primitive::Type result_type = conversion->GetResultType();
4855 Primitive::Type input_type = conversion->GetInputType();
4856
4857 DCHECK_NE(input_type, result_type);
4858
Alexandre Rames542361f2015-01-29 16:57:31 +00004859 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004860 int result_size = Primitive::ComponentSize(result_type);
4861 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004862 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004863 Register output = OutputRegister(conversion);
4864 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004865 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004866 // 'int' values are used directly as W registers, discarding the top
4867 // bits, so we don't need to sign-extend and can just perform a move.
4868 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4869 // top 32 bits of the target register. We theoretically could leave those
4870 // bits unchanged, but we would have to make sure that no code uses a
4871 // 32-bit input value as a 64-bit value assuming that the top 32 bits are
4872 // zero.
4873 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004874 } else if (result_type == Primitive::kPrimChar ||
4875 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4876 __ Ubfx(output,
4877 output.IsX() ? source.X() : source.W(),
4878 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004879 } else {
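      // E.g. an int->byte conversion becomes Sbfx(w_out, w_in, 0, 8), replicating
      // bit 7 into bits 8..31; the char paths above use Ubfx to zero-extend 16 bits.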
Alexandre Rames3e69f162014-12-10 10:36:50 +00004880 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004881 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004882 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004883 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004884 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004885 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4886 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004887 } else if (Primitive::IsFloatingPointType(result_type) &&
4888 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004889 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4890 } else {
4891 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4892 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004893 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004894}
Alexandre Rames67555f72014-11-18 10:55:16 +00004895
Serban Constantinescu02164b32014-11-13 14:05:07 +00004896void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4897 HandleShift(ushr);
4898}
4899
4900void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4901 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004902}
4903
4904void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4905 HandleBinaryOp(instruction);
4906}
4907
4908void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4909 HandleBinaryOp(instruction);
4910}
4911
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004912void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004913 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004914 LOG(FATAL) << "Unreachable";
4915}
4916
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004917void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004918 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004919 LOG(FATAL) << "Unreachable";
4920}
4921
Mark Mendellfe57faa2015-09-18 09:26:15 -04004922// Simple implementation of packed switch - generate cascaded compare/jumps.
4923void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4924 LocationSummary* locations =
4925 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4926 locations->SetInAt(0, Location::RequiresRegister());
4927}
4928
4929void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4930 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004931 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004932 Register value_reg = InputRegisterAt(switch_instr, 0);
4933 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4934
Zheng Xu3927c8b2015-11-18 17:46:25 +08004935 // Roughly assume a maximum average of 16 instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004936 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08004937 // ADR has a limited range (+/- 1MB), so we set a threshold on the number of HIRs in the graph to
4938 // make sure we don't emit it if the target may run out of range.
4939 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4940 // ranges and emit the tables only as required.
4941 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
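  // Illustrative arithmetic, with kInstructionSize == 4 bytes on ARM64:
  //   kMaxExpectedSizePerHInstruction = 16 * 4 B = 64 B
  //   kJumpTableInstructionThreshold  = 1 MiB / 64 B = 16384 HIRs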
Mark Mendellfe57faa2015-09-18 09:26:15 -04004942
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004943 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004944 // Current instruction id is an upper bound of the number of HIRs in the graph.
4945 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4946 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004947 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4948 Register temp = temps.AcquireW();
4949 __ Subs(temp, value_reg, Operand(lower_bound));
4950
Zheng Xu3927c8b2015-11-18 17:46:25 +08004951 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004952 // Jump to successors[0] if value == lower_bound.
4953 __ B(eq, codegen_->GetLabelOf(successors[0]));
4954 int32_t last_index = 0;
4955 for (; num_entries - last_index > 2; last_index += 2) {
4956 __ Subs(temp, temp, Operand(2));
4957 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4958 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4959 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4960 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4961 }
4962 if (num_entries - last_index == 2) {
4963 // Handle the last remaining case value.
4964 __ Cmp(temp, Operand(1));
4965 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004966 }
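    // Illustrative trace for lower_bound == 10, num_entries == 3 (scratch shown
    // as w16 for illustration):
    //   subs w16, w_value, #10 ; b.eq case_10
    //   subs w16, w16, #2      ; b.lo case_11 ; b.eq case_12
    //   b default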
4967
4968 // And the default for any other value.
4969 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4970 __ B(codegen_->GetLabelOf(default_block));
4971 }
4972 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004973 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004974
4975 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4976
4977 // The instructions below need at most one blocked register. Since there are two
4978 // blocked registers, we are free to acquire one of them here.
4979 Register temp_w = temps.AcquireW();
4980 Register index;
4981 // Remove the bias.
4982 if (lower_bound != 0) {
4983 index = temp_w;
4984 __ Sub(index, value_reg, Operand(lower_bound));
4985 } else {
4986 index = value_reg;
4987 }
4988
4989 // Jump to the default block if the index is out of range.
4990 __ Cmp(index, Operand(num_entries));
4991 __ B(hs, codegen_->GetLabelOf(default_block));
4992
4993 // In the current VIXL implementation, encoding the immediate value for Adr does
4994 // not require any blocked registers, so we are free to use both VIXL blocked
4995 // registers to reduce register pressure.
4996 Register table_base = temps.AcquireX();
4997 // Load jump offset from the table.
4998 __ Adr(table_base, jump_table->GetTableStartLabel());
4999 Register jump_offset = temp_w;
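    // Each table entry is assumed to be a 4-byte offset of the target block
    // relative to table_base, hence the UXTW #2 index scaling and the later
    // SXTW extension.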
5000 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5001
5002 // Jump to the target block by branching to table_base (PC-relative) + offset.
5003 Register target_address = table_base;
5004 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5005 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005006 }
5007}
5008
Roland Levillain44015862016-01-22 11:47:17 +00005009void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
5010 Location out,
5011 uint32_t offset,
5012 Location maybe_temp) {
5013 Primitive::Type type = Primitive::kPrimNot;
5014 Register out_reg = RegisterFrom(out, type);
5015 if (kEmitCompilerReadBarrier) {
5016 Register temp_reg = RegisterFrom(maybe_temp, type);
5017 if (kUseBakerReadBarrier) {
5018 // Load with fast path based Baker's read barrier.
5019 // /* HeapReference<Object> */ out = *(out + offset)
5020 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5021 out,
5022 out_reg,
5023 offset,
5024 temp_reg,
5025 /* needs_null_check */ false,
5026 /* use_load_acquire */ false);
5027 } else {
5028 // Load with slow path based read barrier.
5029 // Save the value of `out` into `maybe_temp` before overwriting it
5030 // in the following move operation, as we will need it for the
5031 // read barrier below.
5032 __ Mov(temp_reg, out_reg);
5033 // /* HeapReference<Object> */ out = *(out + offset)
5034 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5035 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5036 }
5037 } else {
5038 // Plain load with no read barrier.
5039 // /* HeapReference<Object> */ out = *(out + offset)
5040 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5041 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5042 }
5043}
5044
5045void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
5046 Location out,
5047 Location obj,
5048 uint32_t offset,
5049 Location maybe_temp) {
5050 Primitive::Type type = Primitive::kPrimNot;
5051 Register out_reg = RegisterFrom(out, type);
5052 Register obj_reg = RegisterFrom(obj, type);
5053 if (kEmitCompilerReadBarrier) {
5054 if (kUseBakerReadBarrier) {
5055 // Load with fast path based Baker's read barrier.
5056 Register temp_reg = RegisterFrom(maybe_temp, type);
5057 // /* HeapReference<Object> */ out = *(obj + offset)
5058 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5059 out,
5060 obj_reg,
5061 offset,
5062 temp_reg,
5063 /* needs_null_check */ false,
5064 /* use_load_acquire */ false);
5065 } else {
5066 // Load with slow path based read barrier.
5067 // /* HeapReference<Object> */ out = *(obj + offset)
5068 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5069 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5070 }
5071 } else {
5072 // Plain load with no read barrier.
5073 // /* HeapReference<Object> */ out = *(obj + offset)
5074 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5075 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5076 }
5077}
5078
5079void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
5080 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005081 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005082 uint32_t offset,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005083 vixl::aarch64::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00005084 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
5085 if (kEmitCompilerReadBarrier) {
5086 if (kUseBakerReadBarrier) {
5087 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
5088 // Baker's read barriers are used:
5089 //
5090 // root = obj.field;
5091 // if (Thread::Current()->GetIsGcMarking()) {
5092 // root = ReadBarrier::Mark(root)
5093 // }
5094
5095 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005096 if (fixup_label == nullptr) {
5097 __ Ldr(root_reg, MemOperand(obj, offset));
5098 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005099 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005100 __ Bind(fixup_label);
5101 __ ldr(root_reg, MemOperand(obj, offset));
5102 }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ add(root_reg.X(), obj.X(), offset);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(fixup_label);
      __ ldr(root_reg, MemOperand(obj, offset));
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0U;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
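  // Since heap references are compressed 32-bit values (see the static_assert
  // above), this scale factor is log2(sizeof(int32_t)) == 2.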
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction.  In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.
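  // On ARM64 the "lfence" in the pseudo-code above is not implemented with a
  // memory barrier instruction; instead, an artificial address dependency on
  // the lock word is created below (the `Add` with an LSR #32 operand), which
  // the architecture's memory model honors for load-load ordering.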

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // `obj` is unchanged by this operation, but its value now depends
  // on `temp`.
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
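  // The LSR #32 operand is always zero here: `temp` was written by a 32-bit
  // load, which zero-extends into the upper half of the X register, so the
  // addition leaves the value of `obj` unchanged while making the register
  // dependent on `temp`.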

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK_EQ(needs_null_check, 0U);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
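        // A64 addressing modes cannot combine a base register, an immediate
        // offset and a scaled register index in a single operand, so
        // materialize `obj + offset` in `temp2` first, then index into it.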
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
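  // Note that `temp` still holds the lock word loaded before the reference
  // load above, so `Tbnz` can test the low read barrier state bit directly
  // in it, with no extra bit-field extraction instruction.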
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
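    // Vtable entries are embedded directly in the class object, so a single
    // load at `method_offset` from the class yields the method pointer.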
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
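    // The IMT, by contrast, lives in a separately allocated ImTable, so two
    // loads are needed: first the ImTable pointer from the class, then the
    // entry at `method_offset` within that table.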
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}


#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art