/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

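// Map an IR IfCondition to the corresponding ARM64 integer condition code.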
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

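// Slow path for HBoundsCheck: throws the array (or, for String.charAt, string)
// index-out-of-bounds exception through the runtime and does not return.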
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

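// Slow path for HDivZeroCheck: throws the arithmetic exception for division by zero through the
// runtime and does not return.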
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

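// Slow path for HLoadClass and HClinitCheck: resolves and/or initializes the class through the
// runtime, then moves the result to the instruction's output location if there is one.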
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

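// Slow path for HNullCheck: throws the null pointer exception through the runtime and does not
// return.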
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

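// Slow path for HSuspendCheck: calls the runtime to check for a pending suspend request, then
// branches back to the code following the check (or to `successor_` when one is given).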
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

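// Slow path for HInstanceOf and HCheckCast: performs the type check through the runtime. When
// the path is fatal (a check-cast that cannot throw into a catch block), no registers are saved
// or restored and control never returns.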
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

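// Slow path for HDeoptimize: transfers control to the interpreter through the runtime and does
// not return to the compiled code.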
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

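// Slow path for HArraySet when the reference store requires a runtime call (kQuickAputObject).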
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

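// Emit the jump table for a PackedSwitch: one 32-bit literal per entry, each holding the offset
// from the table start to the corresponding successor block's label.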
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

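// Select the next argument location (core/FP register while available, stack slot otherwise) for
// the managed calling convention.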
Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return CPURegList(CPURegister::kRegister, kXRegSize,
                    core_spill_mask_);
}

CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return CPURegList(CPURegister::kFPRegister, kDRegSize,
                    fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

Nicolas Geoffray07276db2015-05-18 14:22:09 +01001063void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001064 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001065 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001066 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001067 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001068 if (value_can_be_null) {
1069 __ Cbz(value, &done);
1070 }
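  // The card table base is biased so that its least significant byte equals
  // the dirty card value; the `card` register therefore both addresses the
  // card and supplies the byte to store.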
Andreas Gampe542451c2016-07-26 09:02:02 -07001071 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001072 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001073 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001074 if (value_can_be_null) {
1075 __ Bind(&done);
1076 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001077}
1078
David Brazdil58282f42016-01-14 12:45:10 +00001079void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001080 // Blocked core registers:
1081 // lr : Runtime reserved.
1082 // tr : Runtime reserved.
1083 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1084 // ip1 : VIXL core temp.
1085 // ip0 : VIXL core temp.
1086 //
1087 // Blocked fp registers:
1088 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001089 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1090 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001091 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001092 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001093 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001094
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001095 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001096 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001097 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001098 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001099
David Brazdil58282f42016-01-14 12:45:10 +00001100 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001101 // Stubs do not save callee-save floating point registers. If the graph
1102 // is debuggable, we need to deal with these registers differently. For
1103 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001104 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1105 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001106 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001107 }
1108 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001109}
1110
Alexandre Rames3e69f162014-12-10 10:36:50 +00001111size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1112 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1113 __ Str(reg, MemOperand(sp, stack_index));
1114 return kArm64WordSize;
1115}
1116
1117size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1118 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1119 __ Ldr(reg, MemOperand(sp, stack_index));
1120 return kArm64WordSize;
1121}
1122
1123size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1124 FPRegister reg = FPRegister(reg_id, kDRegSize);
1125 __ Str(reg, MemOperand(sp, stack_index));
1126 return kArm64WordSize;
1127}
1128
1129size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1130 FPRegister reg = FPRegister(reg_id, kDRegSize);
1131 __ Ldr(reg, MemOperand(sp, stack_index));
1132 return kArm64WordSize;
1133}
1134
Alexandre Rames5319def2014-10-23 10:03:10 +01001135void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001136 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001137}
1138
1139void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001140 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001141}
1142
Alexandre Rames67555f72014-11-18 10:55:16 +00001143void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001144 if (constant->IsIntConstant()) {
1145 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1146 } else if (constant->IsLongConstant()) {
1147 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1148 } else if (constant->IsNullConstant()) {
1149 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001150 } else if (constant->IsFloatConstant()) {
1151 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1152 } else {
1153 DCHECK(constant->IsDoubleConstant());
1154 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1155 }
1156}
1157
Alexandre Rames3e69f162014-12-10 10:36:50 +00001158
1159static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1160 DCHECK(constant.IsConstant());
1161 HConstant* cst = constant.GetConstant();
1162 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001163 // Null is mapped to a core W register, which we associate with kPrimInt.
1164 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001165 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1166 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1167 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1168}
1169
Calin Juravlee460d1d2015-09-29 04:52:17 +01001170void CodeGeneratorARM64::MoveLocation(Location destination,
1171 Location source,
1172 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001173 if (source.Equals(destination)) {
1174 return;
1175 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001176
1177 // A valid move can always be inferred from the destination and source
1178 // locations. When moving from and to a register, the argument type can be
1179 // used to generate 32bit instead of 64bit moves. In debug mode we also
1180 // checks the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001181 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001182
1183 if (destination.IsRegister() || destination.IsFpuRegister()) {
1184 if (unspecified_type) {
1185 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1186 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001187 (src_cst != nullptr && (src_cst->IsIntConstant()
1188 || src_cst->IsFloatConstant()
1189 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001190 // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001191 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001192 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001193 // If the source is a double stack slot or a 64bit constant, a 64bit
1194 // type is appropriate. Else the source is a register, and since the
 1195 // type has not been specified, we choose a 64bit type to force a 64bit
1196 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001197 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001198 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001199 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001200 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1201 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1202 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001203 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1204 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1205 __ Ldr(dst, StackOperandFrom(source));
1206 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001207 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001208 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001209 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001210 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001211 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001212 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001213 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001214 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1215 ? Primitive::kPrimLong
1216 : Primitive::kPrimInt;
1217 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1218 }
1219 } else {
1220 DCHECK(source.IsFpuRegister());
1221 if (destination.IsRegister()) {
1222 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1223 ? Primitive::kPrimDouble
1224 : Primitive::kPrimFloat;
1225 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1226 } else {
1227 DCHECK(destination.IsFpuRegister());
1228 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001229 }
1230 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001231 } else { // The destination is not a register. It must be a stack slot.
1232 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1233 if (source.IsRegister() || source.IsFpuRegister()) {
1234 if (unspecified_type) {
1235 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001236 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001237 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001238 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001239 }
1240 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001241 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1242 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1243 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001244 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001245 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1246 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001247 UseScratchRegisterScope temps(GetVIXLAssembler());
1248 HConstant* src_cst = source.GetConstant();
1249 CPURegister temp;
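      // A zero bit pattern can be stored directly from the zero register,
      // avoiding the materialization of a constant in a scratch register.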
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001250 if (src_cst->IsZeroBitPattern()) {
1251 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001252 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001253 if (src_cst->IsIntConstant()) {
1254 temp = temps.AcquireW();
1255 } else if (src_cst->IsLongConstant()) {
1256 temp = temps.AcquireX();
1257 } else if (src_cst->IsFloatConstant()) {
1258 temp = temps.AcquireS();
1259 } else {
1260 DCHECK(src_cst->IsDoubleConstant());
1261 temp = temps.AcquireD();
1262 }
1263 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001264 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001265 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001266 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001267 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001268 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001269 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001270 // There is generally less pressure on FP registers.
1271 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001272 __ Ldr(temp, StackOperandFrom(source));
1273 __ Str(temp, StackOperandFrom(destination));
1274 }
1275 }
1276}
1277
1278void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001279 CPURegister dst,
1280 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001281 switch (type) {
1282 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001283 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001284 break;
1285 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001286 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001287 break;
1288 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001289 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001290 break;
1291 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001292 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001293 break;
1294 case Primitive::kPrimInt:
1295 case Primitive::kPrimNot:
1296 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001297 case Primitive::kPrimFloat:
1298 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001299 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001300 __ Ldr(dst, src);
1301 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001302 case Primitive::kPrimVoid:
1303 LOG(FATAL) << "Unreachable type " << type;
1304 }
1305}
1306
Calin Juravle77520bc2015-01-12 18:45:46 +00001307void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001308 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001309 const MemOperand& src,
1310 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001311 MacroAssembler* masm = GetVIXLAssembler();
1312 BlockPoolsScope block_pools(masm);
1313 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001314 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001315 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001316
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001317 DCHECK(!src.IsPreIndex());
1318 DCHECK(!src.IsPostIndex());
1319
1320 // TODO(vixl): Let the MacroAssembler handle MemOperand.
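  // Load-acquire instructions only accept a plain base-register addressing
  // mode, so the full address is computed into a temporary first.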
Scott Wakeling97c72b72016-06-24 16:19:36 +01001321 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001322 MemOperand base = MemOperand(temp_base);
1323 switch (type) {
1324 case Primitive::kPrimBoolean:
1325 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001326 if (needs_null_check) {
1327 MaybeRecordImplicitNullCheck(instruction);
1328 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001329 break;
1330 case Primitive::kPrimByte:
1331 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001332 if (needs_null_check) {
1333 MaybeRecordImplicitNullCheck(instruction);
1334 }
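      // A64 has no sign-extending load-acquire, so zero-extend with Ldarb and
      // sign-extend the result here (the kPrimShort case below is analogous).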
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001335 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1336 break;
1337 case Primitive::kPrimChar:
1338 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001339 if (needs_null_check) {
1340 MaybeRecordImplicitNullCheck(instruction);
1341 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001342 break;
1343 case Primitive::kPrimShort:
1344 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001345 if (needs_null_check) {
1346 MaybeRecordImplicitNullCheck(instruction);
1347 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001348 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1349 break;
1350 case Primitive::kPrimInt:
1351 case Primitive::kPrimNot:
1352 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001353 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001354 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001355 if (needs_null_check) {
1356 MaybeRecordImplicitNullCheck(instruction);
1357 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001358 break;
1359 case Primitive::kPrimFloat:
1360 case Primitive::kPrimDouble: {
1361 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001362 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001363
1364 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1365 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001366 if (needs_null_check) {
1367 MaybeRecordImplicitNullCheck(instruction);
1368 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001369 __ Fmov(FPRegister(dst), temp);
1370 break;
1371 }
1372 case Primitive::kPrimVoid:
1373 LOG(FATAL) << "Unreachable type " << type;
1374 }
1375}
1376
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001377void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001378 CPURegister src,
1379 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001380 switch (type) {
1381 case Primitive::kPrimBoolean:
1382 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001383 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001384 break;
1385 case Primitive::kPrimChar:
1386 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001387 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001388 break;
1389 case Primitive::kPrimInt:
1390 case Primitive::kPrimNot:
1391 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001392 case Primitive::kPrimFloat:
1393 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001394 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001395 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001396 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001397 case Primitive::kPrimVoid:
1398 LOG(FATAL) << "Unreachable type " << type;
1399 }
1400}
1401
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001402void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1403 CPURegister src,
1404 const MemOperand& dst) {
1405 UseScratchRegisterScope temps(GetVIXLAssembler());
1406 Register temp_base = temps.AcquireX();
1407
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001408 DCHECK(!dst.IsPreIndex());
1409 DCHECK(!dst.IsPostIndex());
1410
1411 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001412 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001413 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001414 MemOperand base = MemOperand(temp_base);
1415 switch (type) {
1416 case Primitive::kPrimBoolean:
1417 case Primitive::kPrimByte:
1418 __ Stlrb(Register(src), base);
1419 break;
1420 case Primitive::kPrimChar:
1421 case Primitive::kPrimShort:
1422 __ Stlrh(Register(src), base);
1423 break;
1424 case Primitive::kPrimInt:
1425 case Primitive::kPrimNot:
1426 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001427 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001428 __ Stlr(Register(src), base);
1429 break;
1430 case Primitive::kPrimFloat:
1431 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001432 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001433 Register temp_src;
1434 if (src.IsZero()) {
1435 // The zero register is used to avoid synthesizing zero constants.
1436 temp_src = Register(src);
1437 } else {
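        // Stlr only operates on core registers, so the FP value is moved to a
        // core temporary before the store-release.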
1438 DCHECK(src.IsFPRegister());
1439 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1440 __ Fmov(temp_src, FPRegister(src));
1441 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001442
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001443 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001444 break;
1445 }
1446 case Primitive::kPrimVoid:
1447 LOG(FATAL) << "Unreachable type " << type;
1448 }
1449}
1450
Calin Juravle175dc732015-08-25 15:42:32 +01001451void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1452 HInstruction* instruction,
1453 uint32_t dex_pc,
1454 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001455 ValidateInvokeRuntime(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001456 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001457 if (EntrypointRequiresStackMap(entrypoint)) {
1458 RecordPcInfo(instruction, dex_pc, slow_path);
1459 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001460}
1461
Roland Levillaindec8f632016-07-22 17:10:06 +01001462void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1463 HInstruction* instruction,
1464 SlowPathCode* slow_path) {
1465 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001466 GenerateInvokeRuntime(entry_point_offset);
1467}
1468
1469void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001470 BlockPoolsScope block_pools(GetVIXLAssembler());
1471 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1472 __ Blr(lr);
1473}
1474
Alexandre Rames67555f72014-11-18 10:55:16 +00001475void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001476 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001477 UseScratchRegisterScope temps(GetVIXLAssembler());
1478 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001479 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1480
Serban Constantinescu02164b32014-11-13 14:05:07 +00001481 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
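  // The load-acquire of the status below pairs with the release store done by
  // the initializing thread, making the initialized class state visible here.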
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001482 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1483 __ Add(temp, class_reg, status_offset);
1484 __ Ldar(temp, HeapOperand(temp));
1485 __ Cmp(temp, mirror::Class::kStatusInitialized);
1486 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001487 __ Bind(slow_path->GetExitLabel());
1488}
Alexandre Rames5319def2014-10-23 10:03:10 +01001489
Roland Levillain44015862016-01-22 11:47:17 +00001490void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001491 BarrierType type = BarrierAll;
1492
1493 switch (kind) {
1494 case MemBarrierKind::kAnyAny:
1495 case MemBarrierKind::kAnyStore: {
1496 type = BarrierAll;
1497 break;
1498 }
1499 case MemBarrierKind::kLoadAny: {
1500 type = BarrierReads;
1501 break;
1502 }
1503 case MemBarrierKind::kStoreStore: {
1504 type = BarrierWrites;
1505 break;
1506 }
1507 default:
1508 LOG(FATAL) << "Unexpected memory barrier " << kind;
1509 }
1510 __ Dmb(InnerShareable, type);
1511}
1512
Serban Constantinescu02164b32014-11-13 14:05:07 +00001513void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1514 HBasicBlock* successor) {
1515 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001516 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1517 if (slow_path == nullptr) {
1518 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1519 instruction->SetSlowPath(slow_path);
1520 codegen_->AddSlowPath(slow_path);
1521 if (successor != nullptr) {
1522 DCHECK(successor->IsLoopHeader());
1523 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1524 }
1525 } else {
1526 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1527 }
1528
Serban Constantinescu02164b32014-11-13 14:05:07 +00001529 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1530 Register temp = temps.AcquireW();
1531
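  // Load the 16-bit thread flags; any set flag (e.g. a pending suspend or
  // checkpoint request) diverts execution to the slow path.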
Andreas Gampe542451c2016-07-26 09:02:02 -07001532 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001533 if (successor == nullptr) {
1534 __ Cbnz(temp, slow_path->GetEntryLabel());
1535 __ Bind(slow_path->GetReturnLabel());
1536 } else {
1537 __ Cbz(temp, codegen_->GetLabelOf(successor));
1538 __ B(slow_path->GetEntryLabel());
1539 // slow_path will return to GetLabelOf(successor).
1540 }
1541}
1542
Alexandre Rames5319def2014-10-23 10:03:10 +01001543InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1544 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001545 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001546 assembler_(codegen->GetAssembler()),
1547 codegen_(codegen) {}
1548
1549#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001550 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001551
1552#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1553
1554enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001555 // Using a base helps identify when we hit such breakpoints.
1556 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001557#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1558 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1559#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1560};
1561
1562#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001563 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001564 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1565 } \
1566 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1567 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1568 locations->SetOut(Location::Any()); \
1569 }
1570 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1571#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1572
1573#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001574#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001575
Alexandre Rames67555f72014-11-18 10:55:16 +00001576void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001577 DCHECK_EQ(instr->InputCount(), 2U);
1578 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1579 Primitive::Type type = instr->GetResultType();
1580 switch (type) {
1581 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001582 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001583 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001584 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001585 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001586 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001587
1588 case Primitive::kPrimFloat:
1589 case Primitive::kPrimDouble:
1590 locations->SetInAt(0, Location::RequiresFpuRegister());
1591 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001592 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001593 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001594
Alexandre Rames5319def2014-10-23 10:03:10 +01001595 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001596 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001597 }
1598}
1599
Alexandre Rames09a99962015-04-15 11:47:56 +01001600void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001601 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1602
1603 bool object_field_get_with_read_barrier =
1604 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001605 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001606 new (GetGraph()->GetArena()) LocationSummary(instruction,
1607 object_field_get_with_read_barrier ?
1608 LocationSummary::kCallOnSlowPath :
1609 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001610 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
1611 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
1612 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001613 locations->SetInAt(0, Location::RequiresRegister());
1614 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1615 locations->SetOut(Location::RequiresFpuRegister());
1616 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001617 // The output overlaps for an object field get when read barriers
1618 // are enabled: we do not want the load to overwrite the object's
1619 // location, as we need it to emit the read barrier.
1620 locations->SetOut(
1621 Location::RequiresRegister(),
1622 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001623 }
1624}
1625
1626void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1627 const FieldInfo& field_info) {
1628 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001629 LocationSummary* locations = instruction->GetLocations();
1630 Location base_loc = locations->InAt(0);
1631 Location out = locations->Out();
1632 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001633 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001634 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001635 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001636
Roland Levillain44015862016-01-22 11:47:17 +00001637 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1638 // Object FieldGet with Baker's read barrier case.
1639 MacroAssembler* masm = GetVIXLAssembler();
1640 UseScratchRegisterScope temps(masm);
1641 // /* HeapReference<Object> */ out = *(base + offset)
1642 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1643 Register temp = temps.AcquireW();
1644 // Note that potential implicit null checks are handled in this
1645 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1646 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1647 instruction,
1648 out,
1649 base,
1650 offset,
1651 temp,
1652 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001653 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001654 } else {
1655 // General case.
1656 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001657 // Note that a potential implicit null check is handled in this
1658 // CodeGeneratorARM64::LoadAcquire call.
1659 // NB: LoadAcquire will record the pc info if needed.
1660 codegen_->LoadAcquire(
1661 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001662 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001663 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001664 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001665 }
Roland Levillain44015862016-01-22 11:47:17 +00001666 if (field_type == Primitive::kPrimNot) {
1667 // If read barriers are enabled, emit read barriers other than
1668 // Baker's using a slow path (and also unpoison the loaded
1669 // reference, if heap poisoning is enabled).
1670 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1671 }
Roland Levillain4d027112015-07-01 15:41:14 +01001672 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001673}
1674
1675void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1676 LocationSummary* locations =
1677 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1678 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001679 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1680 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1681 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001682 locations->SetInAt(1, Location::RequiresFpuRegister());
1683 } else {
1684 locations->SetInAt(1, Location::RequiresRegister());
1685 }
1686}
1687
1688void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001689 const FieldInfo& field_info,
1690 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001691 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001692 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001693
1694 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001695 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001696 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001697 Offset offset = field_info.GetFieldOffset();
1698 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001699
Roland Levillain4d027112015-07-01 15:41:14 +01001700 {
1701 // We use a block to end the scratch scope before the write barrier, thus
1702 // freeing the temporary registers so they can be used in `MarkGCCard`.
1703 UseScratchRegisterScope temps(GetVIXLAssembler());
1704
1705 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1706 DCHECK(value.IsW());
1707 Register temp = temps.AcquireW();
1708 __ Mov(temp, value.W());
1709 GetAssembler()->PoisonHeapReference(temp.W());
1710 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001711 }
Roland Levillain4d027112015-07-01 15:41:14 +01001712
1713 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001714 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1715 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001716 } else {
1717 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1718 codegen_->MaybeRecordImplicitNullCheck(instruction);
1719 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001720 }
1721
1722 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001723 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001724 }
1725}
1726
Alexandre Rames67555f72014-11-18 10:55:16 +00001727void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001728 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001729
1730 switch (type) {
1731 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001732 case Primitive::kPrimLong: {
1733 Register dst = OutputRegister(instr);
1734 Register lhs = InputRegisterAt(instr, 0);
1735 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001736 if (instr->IsAdd()) {
1737 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001738 } else if (instr->IsAnd()) {
1739 __ And(dst, lhs, rhs);
1740 } else if (instr->IsOr()) {
1741 __ Orr(dst, lhs, rhs);
1742 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001743 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001744 } else if (instr->IsRor()) {
1745 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001746 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001747 __ Ror(dst, lhs, shift);
1748 } else {
 1749 // Ensure the shift distance is in a register of the same size as the
 1750 // result. If we are rotating a long and the shift distance originally
 1751 // arrives in a W register, no sxtw to X is needed: shift distances are
 1752 // always masked with reg_bits - 1.
1753 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1754 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001755 } else {
1756 DCHECK(instr->IsXor());
1757 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001758 }
1759 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001760 }
1761 case Primitive::kPrimFloat:
1762 case Primitive::kPrimDouble: {
1763 FPRegister dst = OutputFPRegister(instr);
1764 FPRegister lhs = InputFPRegisterAt(instr, 0);
1765 FPRegister rhs = InputFPRegisterAt(instr, 1);
1766 if (instr->IsAdd()) {
1767 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001768 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001769 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001770 } else {
1771 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001772 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001773 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001774 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001775 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001776 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001777 }
1778}
1779
Serban Constantinescu02164b32014-11-13 14:05:07 +00001780void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1781 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1782
1783 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1784 Primitive::Type type = instr->GetResultType();
1785 switch (type) {
1786 case Primitive::kPrimInt:
1787 case Primitive::kPrimLong: {
1788 locations->SetInAt(0, Location::RequiresRegister());
1789 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1790 locations->SetOut(Location::RequiresRegister());
1791 break;
1792 }
1793 default:
1794 LOG(FATAL) << "Unexpected shift type " << type;
1795 }
1796}
1797
1798void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1799 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1800
1801 Primitive::Type type = instr->GetType();
1802 switch (type) {
1803 case Primitive::kPrimInt:
1804 case Primitive::kPrimLong: {
1805 Register dst = OutputRegister(instr);
1806 Register lhs = InputRegisterAt(instr, 0);
1807 Operand rhs = InputOperandAt(instr, 1);
1808 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001809 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001810 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001811 if (instr->IsShl()) {
1812 __ Lsl(dst, lhs, shift_value);
1813 } else if (instr->IsShr()) {
1814 __ Asr(dst, lhs, shift_value);
1815 } else {
1816 __ Lsr(dst, lhs, shift_value);
1817 }
1818 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001819 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001820
1821 if (instr->IsShl()) {
1822 __ Lsl(dst, lhs, rhs_reg);
1823 } else if (instr->IsShr()) {
1824 __ Asr(dst, lhs, rhs_reg);
1825 } else {
1826 __ Lsr(dst, lhs, rhs_reg);
1827 }
1828 }
1829 break;
1830 }
1831 default:
1832 LOG(FATAL) << "Unexpected shift operation type " << type;
1833 }
1834}
1835
Alexandre Rames5319def2014-10-23 10:03:10 +01001836void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001837 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001838}
1839
1840void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001841 HandleBinaryOp(instruction);
1842}
1843
1844void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1845 HandleBinaryOp(instruction);
1846}
1847
1848void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1849 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001850}
1851
Artem Serov7fc63502016-02-09 17:15:29 +00001852void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001853 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1854 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1855 locations->SetInAt(0, Location::RequiresRegister());
 1856 // There are no immediate variants of the negated bitwise instructions in AArch64.
1857 locations->SetInAt(1, Location::RequiresRegister());
1858 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1859}
1860
Artem Serov7fc63502016-02-09 17:15:29 +00001861void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001862 Register dst = OutputRegister(instr);
1863 Register lhs = InputRegisterAt(instr, 0);
1864 Register rhs = InputRegisterAt(instr, 1);
1865
1866 switch (instr->GetOpKind()) {
1867 case HInstruction::kAnd:
1868 __ Bic(dst, lhs, rhs);
1869 break;
1870 case HInstruction::kOr:
1871 __ Orn(dst, lhs, rhs);
1872 break;
1873 case HInstruction::kXor:
1874 __ Eon(dst, lhs, rhs);
1875 break;
1876 default:
1877 LOG(FATAL) << "Unreachable";
1878 }
1879}
1880
Alexandre Rames8626b742015-11-25 16:28:08 +00001881void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1882 HArm64DataProcWithShifterOp* instruction) {
1883 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1884 instruction->GetType() == Primitive::kPrimLong);
1885 LocationSummary* locations =
1886 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1887 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1888 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1889 } else {
1890 locations->SetInAt(0, Location::RequiresRegister());
1891 }
1892 locations->SetInAt(1, Location::RequiresRegister());
1893 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1894}
1895
1896void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1897 HArm64DataProcWithShifterOp* instruction) {
1898 Primitive::Type type = instruction->GetType();
1899 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1900 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1901 Register out = OutputRegister(instruction);
1902 Register left;
1903 if (kind != HInstruction::kNeg) {
1904 left = InputRegisterAt(instruction, 0);
1905 }
 1906 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion
 1907 // into the shifter operand operation, the IR that generates `right_reg` (the input
 1908 // to the type conversion) can have a different type from this instruction's type,
 1909 // so we indicate the type manually.
1910 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001911 int64_t shift_amount = instruction->GetShiftAmount() &
1912 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001913
1914 Operand right_operand(0);
1915
1916 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1917 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1918 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1919 } else {
1920 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1921 }
1922
1923 // Logical binary operations do not support extension operations in the
1924 // operand. Note that VIXL would still manage if it was passed by generating
1925 // the extension as a separate instruction.
1926 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1927 DCHECK(!right_operand.IsExtendedRegister() ||
1928 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1929 kind != HInstruction::kNeg));
1930 switch (kind) {
1931 case HInstruction::kAdd:
1932 __ Add(out, left, right_operand);
1933 break;
1934 case HInstruction::kAnd:
1935 __ And(out, left, right_operand);
1936 break;
1937 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001938 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001939 __ Neg(out, right_operand);
1940 break;
1941 case HInstruction::kOr:
1942 __ Orr(out, left, right_operand);
1943 break;
1944 case HInstruction::kSub:
1945 __ Sub(out, left, right_operand);
1946 break;
1947 case HInstruction::kXor:
1948 __ Eor(out, left, right_operand);
1949 break;
1950 default:
1951 LOG(FATAL) << "Unexpected operation kind: " << kind;
1952 UNREACHABLE();
1953 }
1954}
1955
Artem Serov328429f2016-07-06 16:23:04 +01001956void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00001957 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1958 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001959 LocationSummary* locations =
1960 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1961 locations->SetInAt(0, Location::RequiresRegister());
1962 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1963 locations->SetOut(Location::RequiresRegister());
1964}
1965
Roland Levillain4a3aa572016-08-15 13:17:06 +00001966void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
1967 HIntermediateAddress* instruction) {
1968 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1969 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001970 __ Add(OutputRegister(instruction),
1971 InputRegisterAt(instruction, 0),
1972 Operand(InputOperandAt(instruction, 1)));
1973}
1974
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001975void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001976 LocationSummary* locations =
1977 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001978 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
1979 if (instr->GetOpKind() == HInstruction::kSub &&
1980 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00001981 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001982 // A zero accumulator folds into an Mneg instruction; don't allocate a register for it.
1983 } else {
1984 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
1985 Location::RequiresRegister());
1986 }
1987 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
1988 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00001989 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1990}
1991
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001992void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001993 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001994 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
1995 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00001996
1997 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
1998 // This fixup should be carried out for all multiply-accumulate instructions:
1999 // madd, msub, smaddl, smsubl, umaddl and umsubl.
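  // Erratum 835769: a 64-bit multiply-accumulate that immediately follows a
  // load or store can produce a wrong result on affected Cortex-A53 cores;
  // inserting a nop between the two instructions breaks the sequence.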
2000 if (instr->GetType() == Primitive::kPrimLong &&
2001 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2002 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002003 vixl::aarch64::Instruction* prev =
2004 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002005 if (prev->IsLoadOrStore()) {
 2006 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002007 vixl::aarch64::CodeBufferCheckScope scope(masm,
2008 kInstructionSize,
2009 vixl::aarch64::CodeBufferCheckScope::kCheck,
2010 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002011 __ nop();
2012 }
2013 }
2014
2015 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002016 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002017 __ Madd(res, mul_left, mul_right, accumulator);
2018 } else {
2019 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002020 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002021 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002022 __ Mneg(res, mul_left, mul_right);
2023 } else {
2024 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2025 __ Msub(res, mul_left, mul_right, accumulator);
2026 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002027 }
2028}
2029
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002030void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002031 bool object_array_get_with_read_barrier =
2032 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002033 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002034 new (GetGraph()->GetArena()) LocationSummary(instruction,
2035 object_array_get_with_read_barrier ?
2036 LocationSummary::kCallOnSlowPath :
2037 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002038 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2039 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
2040 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002041 locations->SetInAt(0, Location::RequiresRegister());
2042 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002043 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2044 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2045 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002046 // The output overlaps in the case of an object array get with
2047 // read barriers enabled: we do not want the move to overwrite the
2048 // array's location, as we need it to emit the read barrier.
2049 locations->SetOut(
2050 Location::RequiresRegister(),
2051 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002052 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002053}
2054
2055void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002056 Primitive::Type type = instruction->GetType();
2057 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002058 LocationSummary* locations = instruction->GetLocations();
2059 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002060 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002061 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002062
Alexandre Ramesd921d642015-04-16 15:07:16 +01002063 MacroAssembler* masm = GetVIXLAssembler();
2064 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002065 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002066 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002067
Roland Levillain44015862016-01-22 11:47:17 +00002068 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2069 // Object ArrayGet with Baker's read barrier case.
2070 Register temp = temps.AcquireW();
Roland Levillain4a3aa572016-08-15 13:17:06 +00002071 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
2072 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Roland Levillain44015862016-01-22 11:47:17 +00002073 // Note that a potential implicit null check is handled in the
2074 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2075 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2076 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002077 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002078 // General case.
2079 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002080 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002081 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2082 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002083 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002084 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002085 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002086 // The read barrier instrumentation does not support the
2087 // HIntermediateAddress instruction yet.
2088 DCHECK(!kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00002089 // We do not need to compute the intermediate address from the array: the
2090 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002091 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002092 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002093 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002094 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2095 }
2096 temp = obj;
2097 } else {
2098 __ Add(temp, obj, offset);
2099 }
2100 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2101 }
2102
2103 codegen_->Load(type, OutputCPURegister(instruction), source);
2104 codegen_->MaybeRecordImplicitNullCheck(instruction);
2105
2106 if (type == Primitive::kPrimNot) {
2107 static_assert(
2108 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2109 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2110 Location obj_loc = locations->InAt(0);
2111 if (index.IsConstant()) {
2112 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2113 } else {
2114 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2115 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002116 }
Roland Levillain4d027112015-07-01 15:41:14 +01002117 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002118}
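
// A worked example of the addressing computed above, assuming the usual
// 12-byte data offset for arrays of 4-byte components (8-byte object header
// plus 4-byte length) and illustrative registers: an int load at constant
// index 3 folds into a single
//   ldr w0, [x1, #24]            // 12 + (3 << 2) == 24
// while a variable index goes through the temp-based form
//   add x16, x1, #12
//   ldr w0, [x16, x2, lsl #2]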
2119
Alexandre Rames5319def2014-10-23 10:03:10 +01002120void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2121 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2122 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002123 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002124}
2125
2126void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002127 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002128 BlockPoolsScope block_pools(GetVIXLAssembler());
Vladimir Markodce016e2016-04-28 13:10:02 +01002129 __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002130 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002131}
2132
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002133void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002134 Primitive::Type value_type = instruction->GetComponentType();
2135
2136 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002137 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2138 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002139 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002140 LocationSummary::kCallOnSlowPath :
2141 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002142 locations->SetInAt(0, Location::RequiresRegister());
2143 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002144 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2145 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2146 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002147 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002148 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002149 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002150 }
Roland Levillaina8c6d702016-08-26 11:17:44 +01002151 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && (value_type == Primitive::kPrimNot)) {
Roland Levillain16d9f942016-08-25 17:27:56 +01002152 // Additional temporary registers for a Baker read barrier.
2153 locations->AddTemp(Location::RequiresRegister());
2154 locations->AddTemp(Location::RequiresRegister());
2155 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002156}
2157
2158void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2159 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002160 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002161 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002162 bool needs_write_barrier =
2163 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002164
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002165 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002166 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002167 CPURegister source = value;
2168 Location index = locations->InAt(1);
2169 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2170 MemOperand destination = HeapOperand(array);
2171 MacroAssembler* masm = GetVIXLAssembler();
2172 BlockPoolsScope block_pools(masm);
2173
2174 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002175 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002176 if (index.IsConstant()) {
2177 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2178 destination = HeapOperand(array, offset);
2179 } else {
2180 UseScratchRegisterScope temps(masm);
2181 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002182 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002183 // The read barrier instrumentation does not support the
2184 // HIntermediateAddress instruction yet.
2185 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002186 // We do not need to compute the intermediate address from the array: the
2187 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002188 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002189 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002190 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002191          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2192 }
2193 temp = array;
2194 } else {
2195 __ Add(temp, array, offset);
2196 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002197 destination = HeapOperand(temp,
2198 XRegisterFrom(index),
2199 LSL,
2200 Primitive::ComponentSizeShift(value_type));
2201 }
2202 codegen_->Store(value_type, value, destination);
2203 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002204 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002205 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002206 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002207 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002208 {
2209 // We use a block to end the scratch scope before the write barrier, thus
2210 // freeing the temporary registers so they can be used in `MarkGCCard`.
2211 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002212 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002213 if (index.IsConstant()) {
2214 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002215 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002216 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002217 destination = HeapOperand(temp,
2218 XRegisterFrom(index),
2219 LSL,
2220 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002221 }
2222
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002223 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2224 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2225 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2226
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002227 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002228 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2229 codegen_->AddSlowPath(slow_path);
2230 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002231 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002232 __ Cbnz(Register(value), &non_zero);
2233 if (!index.IsConstant()) {
2234 __ Add(temp, array, offset);
2235 }
2236 __ Str(wzr, destination);
2237 codegen_->MaybeRecordImplicitNullCheck(instruction);
2238 __ B(&done);
2239 __ Bind(&non_zero);
2240 }
2241
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002242 if (kEmitCompilerReadBarrier) {
Roland Levillain16d9f942016-08-25 17:27:56 +01002243 if (!kUseBakerReadBarrier) {
2244 // When (non-Baker) read barriers are enabled, the type
2245 // checking instrumentation requires two read barriers
2246 // generated by CodeGeneratorARM64::GenerateReadBarrierSlow:
2247 //
2248 // __ Mov(temp2, temp);
2249 // // /* HeapReference<Class> */ temp = temp->component_type_
2250 // __ Ldr(temp, HeapOperand(temp, component_offset));
2251 // codegen_->GenerateReadBarrierSlow(
2252 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2253 //
2254 // // /* HeapReference<Class> */ temp2 = value->klass_
2255 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2256 // codegen_->GenerateReadBarrierSlow(
2257 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2258 //
2259 // __ Cmp(temp, temp2);
2260 //
2261 // However, the second read barrier may trash `temp`, as it
2262 // is a temporary register, and as such would not be saved
2263 // along with live registers before calling the runtime (nor
2264 // restored afterwards). So in this case, we bail out and
2265 // delegate the work to the array set slow path.
2266 //
2267 // TODO: Extend the register allocator to support a new
2268 // "(locally) live temp" location so as to avoid always
2269 // going into the slow path when read barriers are enabled?
2270 //
2271 // There is no such problem with Baker read barriers (see below).
2272 __ B(slow_path->GetEntryLabel());
2273 } else {
2274 // Note that we cannot use `temps` (instance of VIXL's
2275 // UseScratchRegisterScope) to allocate `temp2` because
2276 // the Baker read barriers generated by
2277 // GenerateFieldLoadWithBakerReadBarrier below also use
2278 // that facility to allocate a temporary register, thus
2279 // making VIXL's scratch register pool empty.
2280 Location temp2_loc = locations->GetTemp(0);
2281 Register temp2 = WRegisterFrom(temp2_loc);
2282
2283 // Note: Because it is acquired from VIXL's scratch register
2284 // pool, `temp` might be IP0, and thus cannot be used as
2285 // `ref` argument of GenerateFieldLoadWithBakerReadBarrier
2286 // calls below (see ReadBarrierMarkSlowPathARM64 for more
2287 // details).
2288
2289 // /* HeapReference<Class> */ temp2 = array->klass_
2290 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2291 temp2_loc,
2292 array,
2293 class_offset,
2294 temp,
2295 /* needs_null_check */ true,
2296 /* use_load_acquire */ false);
2297
2298 // /* HeapReference<Class> */ temp2 = temp2->component_type_
2299 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2300 temp2_loc,
2301 temp2,
2302 component_offset,
2303 temp,
2304 /* needs_null_check */ false,
2305 /* use_load_acquire */ false);
2306 // For the same reason that we request `temp2` from the
2307 // register allocator above, we cannot get `temp3` from
2308 // VIXL's scratch register pool.
2309 Location temp3_loc = locations->GetTemp(1);
2310 Register temp3 = WRegisterFrom(temp3_loc);
2311 // Register `temp2` is not trashed by the read barrier
2312 // emitted by GenerateFieldLoadWithBakerReadBarrier below,
2313 // as that method produces a call to a ReadBarrierMarkRegX
2314 // entry point, which saves all potentially live registers,
2315        // including temporaries such as `temp2`.
2316 // /* HeapReference<Class> */ temp3 = register_value->klass_
2317 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2318 temp3_loc,
2319 value.W(),
2320 class_offset,
2321 temp,
2322 /* needs_null_check */ false,
2323 /* use_load_acquire */ false);
2324 // If heap poisoning is enabled, `temp2` and `temp3` have
2325        // been unpoisoned by the previous calls to
2326 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
2327 __ Cmp(temp2, temp3);
2328
2329 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2330 vixl::aarch64::Label do_put;
2331 __ B(eq, &do_put);
2332 // We do not need to emit a read barrier for the
2333 // following heap reference load, as `temp2` is only used
2334 // in a comparison with null below, and this reference
2335 // is not kept afterwards.
2336 // /* HeapReference<Class> */ temp = temp2->super_class_
2337 __ Ldr(temp, HeapOperand(temp2, super_offset));
2338 // If heap poisoning is enabled, no need to unpoison
2339 // `temp`, as we are comparing against null below.
2340 __ Cbnz(temp, slow_path->GetEntryLabel());
2341 __ Bind(&do_put);
2342 } else {
2343 __ B(ne, slow_path->GetEntryLabel());
2344 }
2345 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002346 } else {
Roland Levillain16d9f942016-08-25 17:27:56 +01002347        // Non-read-barrier code.
2348
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002349 Register temp2 = temps.AcquireSameSizeAs(array);
2350 // /* HeapReference<Class> */ temp = array->klass_
2351 __ Ldr(temp, HeapOperand(array, class_offset));
2352 codegen_->MaybeRecordImplicitNullCheck(instruction);
2353 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2354
2355 // /* HeapReference<Class> */ temp = temp->component_type_
2356 __ Ldr(temp, HeapOperand(temp, component_offset));
2357 // /* HeapReference<Class> */ temp2 = value->klass_
2358 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2359 // If heap poisoning is enabled, no need to unpoison `temp`
2360 // nor `temp2`, as we are comparing two poisoned references.
2361 __ Cmp(temp, temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002362 temps.Release(temp2);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002363
2364 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002365 vixl::aarch64::Label do_put;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002366 __ B(eq, &do_put);
2367 // If heap poisoning is enabled, the `temp` reference has
2368 // not been unpoisoned yet; unpoison it now.
2369 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2370
2371 // /* HeapReference<Class> */ temp = temp->super_class_
2372 __ Ldr(temp, HeapOperand(temp, super_offset));
2373 // If heap poisoning is enabled, no need to unpoison
2374 // `temp`, as we are comparing against null below.
2375 __ Cbnz(temp, slow_path->GetEntryLabel());
2376 __ Bind(&do_put);
2377 } else {
2378 __ B(ne, slow_path->GetEntryLabel());
2379 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002380 }
2381 }
2382
2383 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002384 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002385 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002386 __ Mov(temp2, value.W());
2387 GetAssembler()->PoisonHeapReference(temp2);
2388 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002389 }
2390
2391 if (!index.IsConstant()) {
2392 __ Add(temp, array, offset);
2393 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002394 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002395
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002396 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002397 codegen_->MaybeRecordImplicitNullCheck(instruction);
2398 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002399 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002400
2401 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2402
2403 if (done.IsLinked()) {
2404 __ Bind(&done);
2405 }
2406
2407 if (slow_path != nullptr) {
2408 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002409 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002410 }
2411}
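
// The MarkGCCard call above emits the generational write barrier: after a
// reference store, the card covering the array's address is dirtied so the
// GC revisits it. Roughly (see CodeGeneratorARM64::MarkGCCard), it loads the
// card table base from the thread register, indexes it with
// `array >> kCardShift`, and stores a byte there; when GetValueCanBeNull()
// is true the dirtying is skipped for a null `value`.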
2412
Alexandre Rames67555f72014-11-18 10:55:16 +00002413void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002414 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2415 ? LocationSummary::kCallOnSlowPath
2416 : LocationSummary::kNoCall;
2417 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002418 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002419 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002420 if (instruction->HasUses()) {
2421 locations->SetOut(Location::SameAsFirstInput());
2422 }
2423}
2424
2425void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002426 BoundsCheckSlowPathARM64* slow_path =
2427 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002428 codegen_->AddSlowPath(slow_path);
2429
2430 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2431 __ B(slow_path->GetEntryLabel(), hs);
2432}
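
// Note that the single unsigned comparison above covers both failure modes:
// `hs` is unsigned >=, so a negative index wraps to a huge unsigned value.
// For example, with length == 10:
//   index == 12  ->  12 >= 10 (unsigned)          -> slow path
//   index == -1  ->  0xffffffff >= 10 (unsigned)  -> slow path
//   index == 3   ->  3 < 10 (unsigned)            -> in bounds, fall through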
2433
Alexandre Rames67555f72014-11-18 10:55:16 +00002434void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2435 LocationSummary* locations =
2436 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2437 locations->SetInAt(0, Location::RequiresRegister());
2438 if (check->HasUses()) {
2439 locations->SetOut(Location::SameAsFirstInput());
2440 }
2441}
2442
2443void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2444 // We assume the class is not null.
2445 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2446      check->GetLoadClass(), check, check->GetDexPc(), /* do_clinit */ true);
2447 codegen_->AddSlowPath(slow_path);
2448 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2449}
2450
Roland Levillain1a653882016-03-18 18:05:57 +00002451static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2452 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2453 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2454}
2455
2456void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2457 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2458 Location rhs_loc = instruction->GetLocations()->InAt(1);
2459 if (rhs_loc.IsConstant()) {
2460 // 0.0 is the only immediate that can be encoded directly in
2461 // an FCMP instruction.
2462 //
2463 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2464 // specify that in a floating-point comparison, positive zero
2465 // and negative zero are considered equal, so we can use the
2466 // literal 0.0 for both cases here.
2467 //
2468    // Note however that some methods (Float.equals, Float.compare,
2469    // Float.compareTo, Double.equals, Double.compare,
2470 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2471 // StrictMath.min) consider 0.0 to be (strictly) greater than
2472 // -0.0. So if we ever translate calls to these methods into a
2473 // HCompare instruction, we must handle the -0.0 case with
2474 // care here.
2475 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2476 __ Fcmp(lhs_reg, 0.0);
2477 } else {
2478 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2479 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002480}
2481
Serban Constantinescu02164b32014-11-13 14:05:07 +00002482void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002483 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002484 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2485 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002486 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002487 case Primitive::kPrimBoolean:
2488 case Primitive::kPrimByte:
2489 case Primitive::kPrimShort:
2490 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002491 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002492 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002493 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002494 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002495 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2496 break;
2497 }
2498 case Primitive::kPrimFloat:
2499 case Primitive::kPrimDouble: {
2500 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002501 locations->SetInAt(1,
2502 IsFloatingPointZeroConstant(compare->InputAt(1))
2503 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2504 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002505 locations->SetOut(Location::RequiresRegister());
2506 break;
2507 }
2508 default:
2509 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2510 }
2511}
2512
2513void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2514 Primitive::Type in_type = compare->InputAt(0)->GetType();
2515
2516 // 0 if: left == right
2517 // 1 if: left > right
2518 // -1 if: left < right
2519 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002520 case Primitive::kPrimBoolean:
2521 case Primitive::kPrimByte:
2522 case Primitive::kPrimShort:
2523 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002524 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002525 case Primitive::kPrimLong: {
2526 Register result = OutputRegister(compare);
2527 Register left = InputRegisterAt(compare, 0);
2528 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002529 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002530 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2531 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002532 break;
2533 }
2534 case Primitive::kPrimFloat:
2535 case Primitive::kPrimDouble: {
2536 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002537 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002538 __ Cset(result, ne);
2539 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002540 break;
2541 }
2542 default:
2543 LOG(FATAL) << "Unimplemented compare type " << in_type;
2544 }
2545}
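
// A worked example of the integer Cset/Cneg idiom above, with illustrative
// values left == 3 and right == 5 in w1/w2:
//   cmp  w1, w2        // compare 3 with 5
//   cset w0, ne        // 3 != 5, so w0 = 1
//   cneg w0, w0, lt    // 3 < 5, so w0 = -1
// yielding -1, as the contract above requires for left < right.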
2546
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002547void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002548 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002549
2550 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2551 locations->SetInAt(0, Location::RequiresFpuRegister());
2552 locations->SetInAt(1,
2553 IsFloatingPointZeroConstant(instruction->InputAt(1))
2554 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2555 : Location::RequiresFpuRegister());
2556 } else {
2557 // Integer cases.
2558 locations->SetInAt(0, Location::RequiresRegister());
2559 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2560 }
2561
David Brazdilb3e773e2016-01-26 11:28:37 +00002562 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002563 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002564 }
2565}
2566
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002567void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002568 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002569 return;
2570 }
2571
2572 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002573 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002574 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002575
Roland Levillain7f63c522015-07-13 15:54:55 +00002576 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002577 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002578 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002579 } else {
2580 // Integer cases.
2581 Register lhs = InputRegisterAt(instruction, 0);
2582 Operand rhs = InputOperandAt(instruction, 1);
2583 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002584 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002585 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002586}
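
// For example, a materialized integer `a > b` (illustrative registers)
// compiles to:
//   cmp  w1, w2
//   cset w0, gt        // w0 = (a > b) ? 1 : 0
// Conditions emitted at their use site skip this path and are consumed
// directly by the dependent branch or select.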
2587
2588#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2589 M(Equal) \
2590 M(NotEqual) \
2591 M(LessThan) \
2592 M(LessThanOrEqual) \
2593 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002594 M(GreaterThanOrEqual) \
2595 M(Below) \
2596 M(BelowOrEqual) \
2597 M(Above) \
2598 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002599#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002600void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2601void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002602FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002603#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002604#undef FOR_EACH_CONDITION_INSTRUCTION
2605
Zheng Xuc6667102015-05-15 16:08:45 +08002606void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2607 DCHECK(instruction->IsDiv() || instruction->IsRem());
2608
2609 LocationSummary* locations = instruction->GetLocations();
2610 Location second = locations->InAt(1);
2611 DCHECK(second.IsConstant());
2612
2613 Register out = OutputRegister(instruction);
2614 Register dividend = InputRegisterAt(instruction, 0);
2615 int64_t imm = Int64FromConstant(second.GetConstant());
2616 DCHECK(imm == 1 || imm == -1);
2617
2618 if (instruction->IsRem()) {
2619 __ Mov(out, 0);
2620 } else {
2621 if (imm == 1) {
2622 __ Mov(out, dividend);
2623 } else {
2624 __ Neg(out, dividend);
2625 }
2626 }
2627}
2628
2629void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2630 DCHECK(instruction->IsDiv() || instruction->IsRem());
2631
2632 LocationSummary* locations = instruction->GetLocations();
2633 Location second = locations->InAt(1);
2634 DCHECK(second.IsConstant());
2635
2636 Register out = OutputRegister(instruction);
2637 Register dividend = InputRegisterAt(instruction, 0);
2638 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002639 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002640 int ctz_imm = CTZ(abs_imm);
2641
2642 UseScratchRegisterScope temps(GetVIXLAssembler());
2643 Register temp = temps.AcquireSameSizeAs(out);
2644
2645 if (instruction->IsDiv()) {
2646 __ Add(temp, dividend, abs_imm - 1);
2647 __ Cmp(dividend, 0);
2648 __ Csel(out, temp, dividend, lt);
2649 if (imm > 0) {
2650 __ Asr(out, out, ctz_imm);
2651 } else {
2652 __ Neg(out, Operand(out, ASR, ctz_imm));
2653 }
2654 } else {
2655 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2656 __ Asr(temp, dividend, bits - 1);
2657 __ Lsr(temp, temp, bits - ctz_imm);
2658 __ Add(out, dividend, temp);
2659 __ And(out, out, abs_imm - 1);
2660 __ Sub(out, out, temp);
2661 }
2662}
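
// A worked example of the division path above, for imm == 8 (so
// ctz_imm == 3), matching Java's round-toward-zero semantics:
//   dividend == -5:  temp = -5 + 7 = 2; dividend < 0, so out = 2 >> 3 = 0
//                    (-5 / 8 == 0, not the -1 that flooring would give)
//   dividend == 21:  21 >= 0, so out = 21 >> 3 = 2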
2663
2664void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2665 DCHECK(instruction->IsDiv() || instruction->IsRem());
2666
2667 LocationSummary* locations = instruction->GetLocations();
2668 Location second = locations->InAt(1);
2669 DCHECK(second.IsConstant());
2670
2671 Register out = OutputRegister(instruction);
2672 Register dividend = InputRegisterAt(instruction, 0);
2673 int64_t imm = Int64FromConstant(second.GetConstant());
2674
2675 Primitive::Type type = instruction->GetResultType();
2676 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2677
2678 int64_t magic;
2679 int shift;
2680 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2681
2682 UseScratchRegisterScope temps(GetVIXLAssembler());
2683 Register temp = temps.AcquireSameSizeAs(out);
2684
2685 // temp = get_high(dividend * magic)
2686 __ Mov(temp, magic);
2687 if (type == Primitive::kPrimLong) {
2688 __ Smulh(temp, dividend, temp);
2689 } else {
2690 __ Smull(temp.X(), dividend, temp);
2691 __ Lsr(temp.X(), temp.X(), 32);
2692 }
2693
2694 if (imm > 0 && magic < 0) {
2695 __ Add(temp, temp, dividend);
2696 } else if (imm < 0 && magic > 0) {
2697 __ Sub(temp, temp, dividend);
2698 }
2699
2700 if (shift != 0) {
2701 __ Asr(temp, temp, shift);
2702 }
2703
2704 if (instruction->IsDiv()) {
2705 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2706 } else {
2707 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2708 // TODO: Strength reduction for msub.
2709 Register temp_imm = temps.AcquireSameSizeAs(out);
2710 __ Mov(temp_imm, imm);
2711 __ Msub(out, temp, temp_imm, dividend);
2712 }
2713}
2714
2715void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2716 DCHECK(instruction->IsDiv() || instruction->IsRem());
2717 Primitive::Type type = instruction->GetResultType();
2718 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
2719
2720 LocationSummary* locations = instruction->GetLocations();
2721 Register out = OutputRegister(instruction);
2722 Location second = locations->InAt(1);
2723
2724 if (second.IsConstant()) {
2725 int64_t imm = Int64FromConstant(second.GetConstant());
2726
2727 if (imm == 0) {
2728 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
2729 } else if (imm == 1 || imm == -1) {
2730 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002731 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002732 DivRemByPowerOfTwo(instruction);
2733 } else {
2734 DCHECK(imm <= -2 || imm >= 2);
2735 GenerateDivRemWithAnyConstant(instruction);
2736 }
2737 } else {
2738 Register dividend = InputRegisterAt(instruction, 0);
2739 Register divisor = InputRegisterAt(instruction, 1);
2740 if (instruction->IsDiv()) {
2741 __ Sdiv(out, dividend, divisor);
2742 } else {
2743 UseScratchRegisterScope temps(GetVIXLAssembler());
2744 Register temp = temps.AcquireSameSizeAs(out);
2745 __ Sdiv(temp, dividend, divisor);
2746 __ Msub(out, temp, divisor, dividend);
2747 }
2748 }
2749}
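
// In the non-constant Rem case above, the sdiv/msub pair computes the
// remainder via the identity rem == dividend - (dividend / divisor) * divisor.
// For example, dividend == -7, divisor == 3:
//   sdiv: temp = -7 / 3 = -2 (truncated toward zero)
//   msub: out = -7 - (-2 * 3) = -1, matching Java's -7 % 3 == -1.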
2750
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002751void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2752 LocationSummary* locations =
2753 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2754 switch (div->GetResultType()) {
2755 case Primitive::kPrimInt:
2756 case Primitive::kPrimLong:
2757 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002758 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002759 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2760 break;
2761
2762 case Primitive::kPrimFloat:
2763 case Primitive::kPrimDouble:
2764 locations->SetInAt(0, Location::RequiresFpuRegister());
2765 locations->SetInAt(1, Location::RequiresFpuRegister());
2766 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2767 break;
2768
2769 default:
2770 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2771 }
2772}
2773
2774void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2775 Primitive::Type type = div->GetResultType();
2776 switch (type) {
2777 case Primitive::kPrimInt:
2778 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002779 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002780 break;
2781
2782 case Primitive::kPrimFloat:
2783 case Primitive::kPrimDouble:
2784 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2785 break;
2786
2787 default:
2788 LOG(FATAL) << "Unexpected div type " << type;
2789 }
2790}
2791
Alexandre Rames67555f72014-11-18 10:55:16 +00002792void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002793 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2794 ? LocationSummary::kCallOnSlowPath
2795 : LocationSummary::kNoCall;
2796 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002797 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2798 if (instruction->HasUses()) {
2799 locations->SetOut(Location::SameAsFirstInput());
2800 }
2801}
2802
2803void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2804 SlowPathCodeARM64* slow_path =
2805 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2806 codegen_->AddSlowPath(slow_path);
2807 Location value = instruction->GetLocations()->InAt(0);
2808
Alexandre Rames3e69f162014-12-10 10:36:50 +00002809 Primitive::Type type = instruction->GetType();
2810
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002811 if (!Primitive::IsIntegralType(type)) {
2812 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002813 return;
2814 }
2815
Alexandre Rames67555f72014-11-18 10:55:16 +00002816 if (value.IsConstant()) {
2817 int64_t divisor = Int64ConstantFrom(value);
2818 if (divisor == 0) {
2819 __ B(slow_path->GetEntryLabel());
2820 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002821      // A division by a non-zero constant is valid. We don't need to perform
2822 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002823 }
2824 } else {
2825 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2826 }
2827}
2828
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002829void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2830 LocationSummary* locations =
2831 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2832 locations->SetOut(Location::ConstantLocation(constant));
2833}
2834
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002835void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2836 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002837 // Will be generated at use site.
2838}
2839
Alexandre Rames5319def2014-10-23 10:03:10 +01002840void LocationsBuilderARM64::VisitExit(HExit* exit) {
2841 exit->SetLocations(nullptr);
2842}
2843
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002844void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002845}
2846
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002847void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2848 LocationSummary* locations =
2849 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2850 locations->SetOut(Location::ConstantLocation(constant));
2851}
2852
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002853void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002854 // Will be generated at use site.
2855}
2856
David Brazdilfc6a86a2015-06-26 10:33:45 +00002857void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002858 DCHECK(!successor->IsExitBlock());
2859 HBasicBlock* block = got->GetBlock();
2860 HInstruction* previous = got->GetPrevious();
2861 HLoopInformation* info = block->GetLoopInformation();
2862
David Brazdil46e2a392015-03-16 17:31:52 +00002863 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002864 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2865 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2866 return;
2867 }
2868 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2869 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2870 }
2871 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002872 __ B(codegen_->GetLabelOf(successor));
2873 }
2874}
2875
David Brazdilfc6a86a2015-06-26 10:33:45 +00002876void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2877 got->SetLocations(nullptr);
2878}
2879
2880void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2881 HandleGoto(got, got->GetSuccessor());
2882}
2883
2884void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2885 try_boundary->SetLocations(nullptr);
2886}
2887
2888void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2889 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2890 if (!successor->IsExitBlock()) {
2891 HandleGoto(try_boundary, successor);
2892 }
2893}
2894
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002895void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002896 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002897 vixl::aarch64::Label* true_target,
2898 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00002899 // FP branching requires both targets to be explicit. If either of the targets
2900 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002901 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002902 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002903
David Brazdil0debae72015-11-12 18:37:00 +00002904 if (true_target == nullptr && false_target == nullptr) {
2905 // Nothing to do. The code always falls through.
2906 return;
2907 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002908 // Constant condition, statically compared against "true" (integer value 1).
2909 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002910 if (true_target != nullptr) {
2911 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002912 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002913 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002914 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002915 if (false_target != nullptr) {
2916 __ B(false_target);
2917 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002918 }
David Brazdil0debae72015-11-12 18:37:00 +00002919 return;
2920 }
2921
2922 // The following code generates these patterns:
2923 // (1) true_target == nullptr && false_target != nullptr
2924 // - opposite condition true => branch to false_target
2925 // (2) true_target != nullptr && false_target == nullptr
2926 // - condition true => branch to true_target
2927 // (3) true_target != nullptr && false_target != nullptr
2928 // - condition true => branch to true_target
2929 // - branch to false_target
2930 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002931 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002932 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002933 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002934 if (true_target == nullptr) {
2935 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2936 } else {
2937 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2938 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002939 } else {
2940 // The condition instruction has not been materialized, use its inputs as
2941 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002942 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002943
David Brazdil0debae72015-11-12 18:37:00 +00002944 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002945 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002946 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002947 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002948 IfCondition opposite_condition = condition->GetOppositeCondition();
2949 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002950 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002951 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002952 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002953 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002954 // Integer cases.
2955 Register lhs = InputRegisterAt(condition, 0);
2956 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002957
2958 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01002959 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002960 if (true_target == nullptr) {
2961 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2962 non_fallthrough_target = false_target;
2963 } else {
2964 arm64_cond = ARM64Condition(condition->GetCondition());
2965 non_fallthrough_target = true_target;
2966 }
2967
Aart Bik086d27e2016-01-20 17:02:00 -08002968 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01002969 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002970 switch (arm64_cond) {
2971 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002972 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002973 break;
2974 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002975 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002976 break;
2977 case lt:
2978 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002979 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002980 break;
2981 case ge:
2982 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002983 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002984 break;
2985 default:
2986          // Without the `static_cast` the compiler emits an error under
2987          // `-Werror=sign-promo`.
2988 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2989 }
2990 } else {
2991 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002992 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002993 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002994 }
2995 }
David Brazdil0debae72015-11-12 18:37:00 +00002996
2997 // If neither branch falls through (case 3), the conditional branch to `true_target`
2998 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2999 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003000 __ B(false_target);
3001 }
David Brazdil0debae72015-11-12 18:37:00 +00003002
3003 if (fallthrough_target.IsLinked()) {
3004 __ Bind(&fallthrough_target);
3005 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003006}
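
// Examples of the compare-against-zero shortcuts above (registers are
// illustrative):
//   if (x == 0)  ->  cbz  w0, target
//   if (x != 0)  ->  cbnz w0, target
//   if (x < 0)   ->  tbnz w0, #31, target   // branch if the sign bit is set
//   if (x >= 0)  ->  tbz  w0, #31, target
// each saving the explicit cmp against zero.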
3007
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003008void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
3009 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003010 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003011 locations->SetInAt(0, Location::RequiresRegister());
3012 }
3013}
3014
3015void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003016 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3017 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003018 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3019 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3020 true_target = nullptr;
3021 }
3022 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3023 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3024 false_target = nullptr;
3025 }
David Brazdil0debae72015-11-12 18:37:00 +00003026 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003027}
3028
3029void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3030 LocationSummary* locations = new (GetGraph()->GetArena())
3031 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko239d6ea2016-09-05 10:44:04 +01003032 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003033 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003034 locations->SetInAt(0, Location::RequiresRegister());
3035 }
3036}
3037
3038void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003039 SlowPathCodeARM64* slow_path =
3040 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003041 GenerateTestAndBranch(deoptimize,
3042 /* condition_input_index */ 0,
3043 slow_path->GetEntryLabel(),
3044 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003045}
3046
David Brazdilc0b601b2016-02-08 14:20:45 +00003047static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3048 return condition->IsCondition() &&
3049 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3050}
3051
Alexandre Rames880f1192016-06-13 16:04:50 +01003052static inline Condition GetConditionForSelect(HCondition* condition) {
3053 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003054 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3055 : ARM64Condition(cond);
3056}
3057
David Brazdil74eb1b22015-12-14 11:44:01 +00003058void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3059 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003060 if (Primitive::IsFloatingPointType(select->GetType())) {
3061 locations->SetInAt(0, Location::RequiresFpuRegister());
3062 locations->SetInAt(1, Location::RequiresFpuRegister());
3063 locations->SetOut(Location::RequiresFpuRegister());
3064 } else {
3065 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3066 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3067 bool is_true_value_constant = cst_true_value != nullptr;
3068 bool is_false_value_constant = cst_false_value != nullptr;
3069 // Ask VIXL whether we should synthesize constants in registers.
3070 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3071 Operand true_op = is_true_value_constant ?
3072 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3073 Operand false_op = is_false_value_constant ?
3074 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3075 bool true_value_in_register = false;
3076 bool false_value_in_register = false;
3077 MacroAssembler::GetCselSynthesisInformation(
3078 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3079 true_value_in_register |= !is_true_value_constant;
3080 false_value_in_register |= !is_false_value_constant;
3081
3082 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3083 : Location::ConstantLocation(cst_true_value));
3084 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3085 : Location::ConstantLocation(cst_false_value));
3086 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003087 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003088
David Brazdil74eb1b22015-12-14 11:44:01 +00003089 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3090 locations->SetInAt(2, Location::RequiresRegister());
3091 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003092}
3093
3094void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003095 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003096 Condition csel_cond;
3097
3098 if (IsBooleanValueOrMaterializedCondition(cond)) {
3099 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003100 // Use the condition flags set by the previous instruction.
3101 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003102 } else {
3103 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003104 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003105 }
3106 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003107 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003108 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003109 } else {
3110 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003111 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003112 }
3113
Alexandre Rames880f1192016-06-13 16:04:50 +01003114 if (Primitive::IsFloatingPointType(select->GetType())) {
3115 __ Fcsel(OutputFPRegister(select),
3116 InputFPRegisterAt(select, 1),
3117 InputFPRegisterAt(select, 0),
3118 csel_cond);
3119 } else {
3120 __ Csel(OutputRegister(select),
3121 InputOperandAt(select, 1),
3122 InputOperandAt(select, 0),
3123 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003124 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003125}
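
// For example, an integer select with a materialized condition in w2
// (registers are illustrative) becomes:
//   cmp  w2, #0
//   csel w0, w1, w3, ne   // w0 = (cond != 0) ? true_value : false_value
// The floating-point variant emits fcsel on the same flags, and when the
// condition is the immediately preceding HCondition the cmp is skipped
// because the flags it set are still live.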
3126
David Srbecky0cf44932015-12-09 14:09:59 +00003127void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3128 new (GetGraph()->GetArena()) LocationSummary(info);
3129}
3130
David Srbeckyd28f4a02016-03-14 17:14:24 +00003131void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3132 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003133}
3134
3135void CodeGeneratorARM64::GenerateNop() {
3136 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003137}
3138
Alexandre Rames5319def2014-10-23 10:03:10 +01003139void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003140 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003141}
3142
3143void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003144 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003145}
3146
3147void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003148 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003149}
3150
3151void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003152 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003153}
3154
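// Added note (informal rationale, not authoritative): the loop-based checks
// in the visitors below clobber `out`/`temp` with intermediate class
// references, so when read barriers are enabled the barrier code needs a
// scratch register of its own, hence the extra temporary requested for the
// check kinds listed here.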
Roland Levillain44015862016-01-22 11:47:17 +00003155static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3156 return kEmitCompilerReadBarrier &&
3157 (kUseBakerReadBarrier ||
3158 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3159 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3160 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3161}
3162
Alexandre Rames67555f72014-11-18 10:55:16 +00003163void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003164 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003165 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003166 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003167 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003168 case TypeCheckKind::kExactCheck:
3169 case TypeCheckKind::kAbstractClassCheck:
3170 case TypeCheckKind::kClassHierarchyCheck:
3171 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003172 call_kind =
3173 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003174 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003175 break;
3176 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003177 case TypeCheckKind::kUnresolvedCheck:
3178 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003179 call_kind = LocationSummary::kCallOnSlowPath;
3180 break;
3181 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003182
Alexandre Rames67555f72014-11-18 10:55:16 +00003183 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003184 if (baker_read_barrier_slow_path) {
3185 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
3186 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003187 locations->SetInAt(0, Location::RequiresRegister());
3188 locations->SetInAt(1, Location::RequiresRegister());
3189 // The "out" register is used as a temporary, so it overlaps with the inputs.
3190 // Note that TypeCheckSlowPathARM64 uses this register too.
3191 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3192 // When read barriers are enabled, we need a temporary register for
3193 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003194 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003195 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003196 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003197}
3198
3199void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003200 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003201 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003202 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003203 Register obj = InputRegisterAt(instruction, 0);
3204 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003205 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003206 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003207 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3208 locations->GetTemp(0) :
3209 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003210 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3211 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3212 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3213 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003214
Scott Wakeling97c72b72016-06-24 16:19:36 +01003215 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003216 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003217
3218 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003219 // Avoid null check if we know `obj` is not null.
3220 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003221 __ Cbz(obj, &zero);
3222 }
3223
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003224 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003225 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003226
Roland Levillain44015862016-01-22 11:47:17 +00003227 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003228 case TypeCheckKind::kExactCheck: {
3229 __ Cmp(out, cls);
3230 __ Cset(out, eq);
3231 if (zero.IsLinked()) {
3232 __ B(&done);
3233 }
3234 break;
3235 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003236
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003237 case TypeCheckKind::kAbstractClassCheck: {
3238 // If the class is abstract, we eagerly fetch the super class of the
3239 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003240 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003241 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003242 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003243 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003244 // If `out` is null, we use it for the result, and jump to `done`.
3245 __ Cbz(out, &done);
3246 __ Cmp(out, cls);
3247 __ B(ne, &loop);
3248 __ Mov(out, 1);
3249 if (zero.IsLinked()) {
3250 __ B(&done);
3251 }
3252 break;
3253 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003254
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003255 case TypeCheckKind::kClassHierarchyCheck: {
3256 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003257 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003258 __ Bind(&loop);
3259 __ Cmp(out, cls);
3260 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003261 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003262 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003263 __ Cbnz(out, &loop);
3264 // If `out` is null, we use it for the result, and jump to `done`.
3265 __ B(&done);
3266 __ Bind(&success);
3267 __ Mov(out, 1);
3268 if (zero.IsLinked()) {
3269 __ B(&done);
3270 }
3271 break;
3272 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003273
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003274 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003275 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003276 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003277 __ Cmp(out, cls);
3278 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003279 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003280 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003281 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003282 // If `out` is null, we use it for the result, and jump to `done`.
3283 __ Cbz(out, &done);
3284 __ Ldrh(out, HeapOperand(out, primitive_offset));
3285 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3286 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003287 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003288 __ Mov(out, 1);
3289 __ B(&done);
3290 break;
3291 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003292
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003293 case TypeCheckKind::kArrayCheck: {
3294 __ Cmp(out, cls);
3295 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003296 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3297 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003298 codegen_->AddSlowPath(slow_path);
3299 __ B(ne, slow_path->GetEntryLabel());
3300 __ Mov(out, 1);
3301 if (zero.IsLinked()) {
3302 __ B(&done);
3303 }
3304 break;
3305 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003306
Calin Juravle98893e12015-10-02 21:05:03 +01003307 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003308 case TypeCheckKind::kInterfaceCheck: {
3309 // Note that we indeed only call on slow path, but we always go
3310 // into the slow path for the unresolved and interface check
3311 // cases.
3312 //
3313 // We cannot directly call the InstanceofNonTrivial runtime
3314 // entry point without resorting to a type checking slow path
3315 // here (i.e. by calling InvokeRuntime directly), as it would
3316 // require us to assign fixed registers for the inputs of this
3317 // HInstanceOf instruction (following the runtime calling
3318 // convention), which might be cluttered by the potential first
3319 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003320 //
3321 // TODO: Introduce a new runtime entry point taking the object
3322 // to test (instead of its class) as argument, and let it deal
3323 // with the read barrier issues. This will let us refactor this
3324 // case of the `switch` code as it was previously (with a direct
3325 // call to the runtime not using a type checking slow path).
3326 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003327 DCHECK(locations->OnlyCallsOnSlowPath());
3328 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3329 /* is_fatal */ false);
3330 codegen_->AddSlowPath(slow_path);
3331 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003332 if (zero.IsLinked()) {
3333 __ B(&done);
3334 }
3335 break;
3336 }
3337 }
3338
3339 if (zero.IsLinked()) {
3340 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003341 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003342 }
3343
3344 if (done.IsLinked()) {
3345 __ Bind(&done);
3346 }
3347
3348 if (slow_path != nullptr) {
3349 __ Bind(slow_path->GetExitLabel());
3350 }
3351}
3352
3353void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3354 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3355 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3356
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003357 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003358 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003359 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003360 case TypeCheckKind::kExactCheck:
3361 case TypeCheckKind::kAbstractClassCheck:
3362 case TypeCheckKind::kClassHierarchyCheck:
3363 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003364 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3365 LocationSummary::kCallOnSlowPath :
3366 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Vladimir Marko70e97462016-08-09 11:04:26 +01003367 baker_read_barrier_slow_path = kUseBakerReadBarrier && !throws_into_catch;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003368 break;
3369 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003370 case TypeCheckKind::kUnresolvedCheck:
3371 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003372 call_kind = LocationSummary::kCallOnSlowPath;
3373 break;
3374 }
3375
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003376 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003377 if (baker_read_barrier_slow_path) {
3378 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
3379 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003380 locations->SetInAt(0, Location::RequiresRegister());
3381 locations->SetInAt(1, Location::RequiresRegister());
3382 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3383 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003384 // When read barriers are enabled, we need an additional temporary
3385 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003386 if (TypeCheckNeedsATemporary(type_check_kind)) {
3387 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003388 }
3389}
3390
3391void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003392 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003393 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003394 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003395 Register obj = InputRegisterAt(instruction, 0);
3396 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003397 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003398 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3399 locations->GetTemp(1) :
3400 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003401 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003402 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3403 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3404 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3405 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003406
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003407 bool is_type_check_slow_path_fatal =
3408 (type_check_kind == TypeCheckKind::kExactCheck ||
3409 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3410 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3411 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3412 !instruction->CanThrowIntoCatchBlock();
3413 SlowPathCodeARM64* type_check_slow_path =
3414 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3415 is_type_check_slow_path_fatal);
3416 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003417
Scott Wakeling97c72b72016-06-24 16:19:36 +01003418 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003419 // Avoid null check if we know obj is not null.
3420 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003421 __ Cbz(obj, &done);
3422 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003423
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003424 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003425 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003426
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003427 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003428 case TypeCheckKind::kExactCheck:
3429 case TypeCheckKind::kArrayCheck: {
3430 __ Cmp(temp, cls);
3431 // Jump to slow path for throwing the exception or doing a
3432 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003433 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003434 break;
3435 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003436
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003437 case TypeCheckKind::kAbstractClassCheck: {
3438 // If the class is abstract, we eagerly fetch the super class of the
3439 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003440 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003441 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003442 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003443 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003444
3445 // If the class reference currently in `temp` is not null, jump
3446 // to the `compare_classes` label to compare it with the checked
3447 // class.
3448 __ Cbnz(temp, &compare_classes);
3449 // Otherwise, jump to the slow path to throw the exception.
3450 //
3451 // But first, move back the object's class into `temp` before
3452 // going into the slow path, as it has been overwritten in the
3453 // meantime.
3454 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003455 GenerateReferenceLoadTwoRegisters(
3456 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003457 __ B(type_check_slow_path->GetEntryLabel());
3458
3459 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003460 __ Cmp(temp, cls);
3461 __ B(ne, &loop);
3462 break;
3463 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003464
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003465 case TypeCheckKind::kClassHierarchyCheck: {
3466 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003467 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003468 __ Bind(&loop);
3469 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003470 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003471
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003472 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003473 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003474
3475 // If the class reference currently in `temp` is not null, jump
3476 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003477 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003478 // Otherwise, jump to the slow path to throw the exception.
3479 //
3480 // But first, move back the object's class into `temp` before
3481 // going into the slow path, as it has been overwritten in the
3482 // meantime.
3483 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003484 GenerateReferenceLoadTwoRegisters(
3485 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003486 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003487 break;
3488 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003489
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003490 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003491 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003492 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003493 __ Cmp(temp, cls);
3494 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003495
3496 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003497 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003498 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003499
3500 // If the component type is not null (i.e. the object is indeed
3501 // an array), jump to label `check_non_primitive_component_type`
3502 // to further check that this component type is not a primitive
3503 // type.
3504 __ Cbnz(temp, &check_non_primitive_component_type);
3505 // Otherwise, jump to the slow path to throw the exception.
3506 //
3507 // But first, move back the object's class into `temp` before
3508 // going into the slow path, as it has been overwritten in the
3509 // meantime.
3510 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003511 GenerateReferenceLoadTwoRegisters(
3512 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003513 __ B(type_check_slow_path->GetEntryLabel());
3514
3515 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003516 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3517 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003518 __ Cbz(temp, &done);
3519 // Same comment as above regarding `temp` and the slow path.
3520 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003521 GenerateReferenceLoadTwoRegisters(
3522 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003523 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003524 break;
3525 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003526
Calin Juravle98893e12015-10-02 21:05:03 +01003527 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003528 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003529 // We always go into the type check slow path for the unresolved
3530 // and interface check cases.
3531 //
3532 // We cannot directly call the CheckCast runtime entry point
3533 // without resorting to a type checking slow path here (i.e. by
3534 // calling InvokeRuntime directly), as it would require us to
3535 // assign fixed registers for the inputs of this HCheckCast
3536 // instruction (following the runtime calling convention), which
3537 // might be cluttered by the potential first read barrier
3538 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003539 //
3540 // TODO: Introduce a new runtime entry point taking the object
3541 // to test (instead of its class) as argument, and let it deal
3542 // with the read barrier issues. This will let us refactor this
3543 // case of the `switch` code as it was previously (with a direct
3544 // call to the runtime not using a type checking slow path).
3545 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003546 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003547 break;
3548 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003549 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003550
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003551 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003552}
3553
Alexandre Rames5319def2014-10-23 10:03:10 +01003554void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3555 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3556 locations->SetOut(Location::ConstantLocation(constant));
3557}
3558
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003559void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003560 // Will be generated at use site.
3561}
3562
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003563void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3564 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3565 locations->SetOut(Location::ConstantLocation(constant));
3566}
3567
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003568void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003569 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003570}
3571
Calin Juravle175dc732015-08-25 15:42:32 +01003572void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3573 // The trampoline uses the same calling convention as regular dex calls,
3574 // except that instead of loading arg0/r0 with the target Method*, arg0/r0
3575 // will contain the method_idx.
3576 HandleInvoke(invoke);
3577}
3578
3579void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3580 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3581}
3582
Alexandre Rames5319def2014-10-23 10:03:10 +01003583void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003584 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003585 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003586}
3587
Alexandre Rames67555f72014-11-18 10:55:16 +00003588void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3589 HandleInvoke(invoke);
3590}
3591
3592void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3593 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
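  // Informal overview of the dispatch sequence emitted below: load the
  // receiver's class, then the class's ImTable pointer, then the ImTable
  // slot for this call's IMT index, and finally branch to that ArtMethod's
  // quick entry point; ip1 carries the method index as the hidden argument
  // expected by art_quick_imt_conflict_trampoline.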
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003594 LocationSummary* locations = invoke->GetLocations();
3595 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003596 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003597 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003598 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003599
3600 // The register ip1 is required to be used for the hidden argument in
3601 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003602 MacroAssembler* masm = GetVIXLAssembler();
3603 UseScratchRegisterScope scratch_scope(masm);
3604 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003605 scratch_scope.Exclude(ip1);
3606 __ Mov(ip1, invoke->GetDexMethodIndex());
3607
Alexandre Rames67555f72014-11-18 10:55:16 +00003608 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003609 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003610 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003611 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003612 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003613 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003614 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003615 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003616 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003617 // Instead of simply (possibly) unpoisoning `temp` here, we should
3618 // emit a read barrier for the previous class reference load.
3619 // However this is not required in practice, as this is an
3620 // intermediate/temporary reference and because the current
3621 // concurrent copying collector keeps the from-space memory
3622 // intact/accessible until the end of the marking phase (the
3623 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003624 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003625 __ Ldr(temp,
3626 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3627 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003628 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003629 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003630 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003631 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003632 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003633 // lr();
3634 __ Blr(lr);
3635 DCHECK(!codegen_->IsLeafMethod());
3636 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3637}
3638
3639void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003640 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3641 if (intrinsic.TryDispatch(invoke)) {
3642 return;
3643 }
3644
Alexandre Rames67555f72014-11-18 10:55:16 +00003645 HandleInvoke(invoke);
3646}
3647
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003648void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003649 // Explicit clinit checks triggered by static invokes must have been pruned by
3650 // art::PrepareForRegisterAllocation.
3651 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003652
Andreas Gampe878d58c2015-01-15 23:24:00 -08003653 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3654 if (intrinsic.TryDispatch(invoke)) {
3655 return;
3656 }
3657
Alexandre Rames67555f72014-11-18 10:55:16 +00003658 HandleInvoke(invoke);
3659}
3660
Andreas Gampe878d58c2015-01-15 23:24:00 -08003661static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3662 if (invoke->GetLocations()->Intrinsified()) {
3663 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3664 intrinsic.Dispatch(invoke);
3665 return true;
3666 }
3667 return false;
3668}
3669
Vladimir Markodc151b22015-10-15 18:02:30 +01003670HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3671 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3672 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003673 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003674 return desired_dispatch_info;
3675}
3676
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003677void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003678 // For better instruction scheduling we load the direct code pointer before the method pointer.
3679 bool direct_code_loaded = false;
3680 switch (invoke->GetCodePtrLocation()) {
3681 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3682 // LR = code address from literal pool with link-time patch.
3683 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3684 direct_code_loaded = true;
3685 break;
3686 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3687 // LR = invoke->GetDirectCodePtr();
3688 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3689 direct_code_loaded = true;
3690 break;
3691 default:
3692 break;
3693 }
3694
Andreas Gampe878d58c2015-01-15 23:24:00 -08003695 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003696 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3697 switch (invoke->GetMethodLoadKind()) {
3698 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3699 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003700 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003701 break;
3702 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003703 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003704 break;
3705 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3706 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003707 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003708 break;
3709 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3710 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003711 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003712 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3713 break;
3714 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
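      // Sketch of the pattern emitted below (registers/labels illustrative):
      //   adrp x0, <array_page>
      //   ldr  x0, [x0, #<page offset>]
      // ADRP materializes the 4KiB-page base of the target and the LDR
      // offset covers the remainder; both fields are fixed up later via the
      // recorded patch labels.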
3715 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003716 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3717 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003718 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003719 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003720 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003721 __ Bind(adrp_label);
3722 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003723 }
Vladimir Marko58155012015-08-19 12:49:41 +00003724 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003725 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003726 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003727 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003728 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003729 __ Bind(ldr_label);
3730 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003731 }
Vladimir Marko58155012015-08-19 12:49:41 +00003732 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003733 }
Vladimir Marko58155012015-08-19 12:49:41 +00003734 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003735 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003736 Register reg = XRegisterFrom(temp);
3737 Register method_reg;
3738 if (current_method.IsRegister()) {
3739 method_reg = XRegisterFrom(current_method);
3740 } else {
3741 DCHECK(invoke->GetLocations()->Intrinsified());
3742 DCHECK(!current_method.IsValid());
3743 method_reg = reg;
3744 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3745 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003746
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003747 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003748 __ Ldr(reg.X(),
3749 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07003750 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003751 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003752 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3753 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003754 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3755 break;
3756 }
3757 }
3758
3759 switch (invoke->GetCodePtrLocation()) {
3760 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3761 __ Bl(&frame_entry_label_);
3762 break;
3763 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3764 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003765 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3766 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003767 __ Bind(label);
3768 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003769 break;
3770 }
3771 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3772 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3773 // LR prepared above for better instruction scheduling.
3774 DCHECK(direct_code_loaded);
3775 // lr()
3776 __ Blr(lr);
3777 break;
3778 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3779 // LR = callee_method->entry_point_from_quick_compiled_code_;
3780 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003781 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07003782 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003783 // lr()
3784 __ Blr(lr);
3785 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003786 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003787
Andreas Gampe878d58c2015-01-15 23:24:00 -08003788 DCHECK(!IsLeafMethod());
3789}
3790
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003791void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003792 // Use the calling convention instead of the location of the receiver, as
3793 // intrinsics may have put the receiver in a different register. In the intrinsics
3794 // slow path, the arguments have been moved to the right place, so here we are
3795 // guaranteed that the receiver is in the first register of the calling convention.
3796 InvokeDexCallingConvention calling_convention;
3797 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003798 Register temp = XRegisterFrom(temp_in);
3799 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3800 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3801 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003802 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003803
3804 BlockPoolsScope block_pools(GetVIXLAssembler());
3805
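  // Informal summary: virtual dispatch loads receiver->klass_, indexes the
  // class's embedded vtable at `method_offset`, and branches to that
  // ArtMethod's quick-compiled entry point.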
3806 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003807 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003808 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003809 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003810 // Instead of simply (possibly) unpoisoning `temp` here, we should
3811 // emit a read barrier for the previous class reference load.
 // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003812 // intermediate/temporary reference and because the current
3813 // concurrent copying collector keeps the from-space memory
3814 // intact/accessible until the end of the marking phase (the
3815 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003816 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3817 // temp = temp->GetMethodAt(method_offset);
3818 __ Ldr(temp, MemOperand(temp, method_offset));
3819 // lr = temp->GetEntryPoint();
3820 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3821 // lr();
3822 __ Blr(lr);
3823}
3824
Scott Wakeling97c72b72016-06-24 16:19:36 +01003825vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3826 const DexFile& dex_file,
3827 uint32_t string_index,
3828 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003829 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3830}
3831
Scott Wakeling97c72b72016-06-24 16:19:36 +01003832vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3833 const DexFile& dex_file,
3834 uint32_t type_index,
3835 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003836 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3837}
3838
Scott Wakeling97c72b72016-06-24 16:19:36 +01003839vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3840 const DexFile& dex_file,
3841 uint32_t element_offset,
3842 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003843 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3844}
3845
Scott Wakeling97c72b72016-06-24 16:19:36 +01003846vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3847 const DexFile& dex_file,
3848 uint32_t offset_or_index,
3849 vixl::aarch64::Label* adrp_label,
3850 ArenaDeque<PcRelativePatchInfo>* patches) {
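  // Added note: callers record an ADRP/LDR (or ADRP/ADD) pair as two entries
  // whose `pc_insn_label` both resolve to the ADRP, so the patcher can
  // compute both instructions' fields relative to the ADRP's location.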
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003851 // Add a patch entry and return the label.
3852 patches->emplace_back(dex_file, offset_or_index);
3853 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003854 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003855 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3856 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3857 return label;
3858}
3859
Scott Wakeling97c72b72016-06-24 16:19:36 +01003860vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003861 const DexFile& dex_file, uint32_t string_index) {
3862 return boot_image_string_patches_.GetOrCreate(
3863 StringReference(&dex_file, string_index),
3864 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3865}
3866
Scott Wakeling97c72b72016-06-24 16:19:36 +01003867vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003868 const DexFile& dex_file, uint32_t type_index) {
3869 return boot_image_type_patches_.GetOrCreate(
3870 TypeReference(&dex_file, type_index),
3871 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3872}
3873
Scott Wakeling97c72b72016-06-24 16:19:36 +01003874vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3875 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003876 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3877 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3878 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3879}
3880
Scott Wakeling97c72b72016-06-24 16:19:36 +01003881vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3882 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003883 return DeduplicateUint64Literal(address);
3884}
3885
Vladimir Marko58155012015-08-19 12:49:41 +00003886void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3887 DCHECK(linker_patches->empty());
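  // Added note: this walks every patch container populated during code
  // generation and converts each entry into the matching LinkerPatch kind;
  // the reserve() below is just a capacity hint covering all of them.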
3888 size_t size =
3889 method_patches_.size() +
3890 call_patches_.size() +
3891 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003892 pc_relative_dex_cache_patches_.size() +
3893 boot_image_string_patches_.size() +
3894 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003895 boot_image_type_patches_.size() +
3896 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003897 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003898 linker_patches->reserve(size);
3899 for (const auto& entry : method_patches_) {
3900 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003901 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3902 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003903 target_method.dex_file,
3904 target_method.dex_method_index));
3905 }
3906 for (const auto& entry : call_patches_) {
3907 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003908 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3909 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003910 target_method.dex_file,
3911 target_method.dex_method_index));
3912 }
Scott Wakeling97c72b72016-06-24 16:19:36 +01003913 for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3914 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003915 info.target_method.dex_file,
3916 info.target_method.dex_method_index));
3917 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003918 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003919 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003920 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003921 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003922 info.offset_or_index));
3923 }
3924 for (const auto& entry : boot_image_string_patches_) {
3925 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003926 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3927 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003928 target_string.dex_file,
3929 target_string.string_index));
3930 }
3931 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003932 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003933 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003934 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003935 info.offset_or_index));
3936 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003937 for (const auto& entry : boot_image_type_patches_) {
3938 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003939 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3940 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003941 target_type.dex_file,
3942 target_type.type_index));
3943 }
3944 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003945 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003946 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003947 info.pc_insn_label->GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003948 info.offset_or_index));
3949 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003950 for (const auto& entry : boot_image_address_patches_) {
3951 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003952 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3953 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003954 }
3955}
3956
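// Added note (inferred from usage): the GetOrCreate calls below return the
// cached literal for the key when one exists, and otherwise run the lambda
// to create and memoize a new pool literal, so each distinct value is
// emitted at most once.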
Scott Wakeling97c72b72016-06-24 16:19:36 +01003957vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003958 Uint32ToLiteralMap* map) {
3959 return map->GetOrCreate(
3960 value,
3961 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3962}
3963
Scott Wakeling97c72b72016-06-24 16:19:36 +01003964vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003965 return uint64_literals_.GetOrCreate(
3966 value,
3967 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003968}
3969
Scott Wakeling97c72b72016-06-24 16:19:36 +01003970vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003971 MethodReference target_method,
3972 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003973 return map->GetOrCreate(
3974 target_method,
3975 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003976}
3977
Scott Wakeling97c72b72016-06-24 16:19:36 +01003978vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003979 MethodReference target_method) {
3980 return DeduplicateMethodLiteral(target_method, &method_patches_);
3981}
3982
Scott Wakeling97c72b72016-06-24 16:19:36 +01003983vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003984 MethodReference target_method) {
3985 return DeduplicateMethodLiteral(target_method, &call_patches_);
3986}
3987
3988
Andreas Gampe878d58c2015-01-15 23:24:00 -08003989void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003990 // Explicit clinit checks triggered by static invokes must have been pruned by
3991 // art::PrepareForRegisterAllocation.
3992 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003993
Andreas Gampe878d58c2015-01-15 23:24:00 -08003994 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3995 return;
3996 }
3997
Alexandre Ramesd921d642015-04-16 15:07:16 +01003998 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003999 LocationSummary* locations = invoke->GetLocations();
4000 codegen_->GenerateStaticOrDirectCall(
4001 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004002 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004003}
4004
4005void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004006 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4007 return;
4008 }
4009
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004010 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004011 DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_class_load_kind;
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(vixl::aarch64::x0),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet());  // No caller-save registers.
  }

  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = cls->GetLocations()->Out();
  Register out = OutputRegister(cls);

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      Register current_method = InputRegisterAt(cls, 0);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              /*fixup_label*/ nullptr,
                              requires_read_barrier);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!requires_read_barrier);
      __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
                                                            cls->GetTypeIndex()));
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(!requires_read_barrier);
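      // Illustrative only: the pair below is emitted with placeholder offsets
      // and later patched to something like
      //   adrp xN, <page of the type's address> ; add xN, xN, #<page offset>
      // so that `out` ends up holding the type's boot-image address.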
      // Add ADRP with its PC-relative type patch.
      const DexFile& dex_file = cls->GetDexFile();
      uint32_t type_index = cls->GetTypeIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add ADD with its PC-relative type patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(add_label);
        __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
      }
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // The LDR immediate encoding has a 12-bit offset that is scaled by the access size; for
      // 32-bit loads that gives a 16KiB range. To reduce the number of literals when we load
      // multiple types, split the dex cache address into a 16KiB-aligned base loaded from a
      // literal and the remaining offset embedded in the load.
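      // Illustrative example: address 0x12345678 splits into the literal base
      // 0x12344000 and the embedded load offset 0x1678.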
      static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
      DCHECK_ALIGNED(cls->GetAddress(), 4u);
      constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
      uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
      uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
      __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              offset,
                              /*fixup_label*/ nullptr,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = cls->GetDexFile();
      uint32_t element_offset = cls->GetDexCacheElementOffset();
      vixl::aarch64::Label* adrp_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              /* offset placeholder */ 0,
                              ldr_label,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      MemberOffset resolved_types_offset =
          ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      Register current_method = InputRegisterAt(cls, 0);
      __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              CodeGenerator::GetCacheOffset(cls->GetTypeIndex()),
                              /*fixup_label*/ nullptr,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

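// The pending exception is a field of the current Thread; on ARM64 `tr` is the
// register holding the Thread*, so the address below is tr-relative.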
static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  if (kEmitCompilerReadBarrier) {
    switch (desired_string_load_kind) {
      case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      case HLoadString::LoadKind::kBootImageAddress:
        // TODO: Implement for read barrier.
        return HLoadString::LoadKind::kDexCacheViaMethod;
      default:
        break;
    }
  }
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Register out = OutputRegister(load);

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!kEmitCompilerReadBarrier);
      __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
                                                              load->GetStringIndex()));
      return;  // No dex cache slow path.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(!kEmitCompilerReadBarrier);
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t string_index = load->GetStringIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add ADD with its PC-relative String patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(add_label);
        __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
      }
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
      return;  // No dex cache slow path.
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  InvokeRuntimeCallingConvention calling_convention;
  __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex());
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
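  // Booleans are materialized as 0 or 1, so XOR-ing with 1 negates the value.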
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
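  // Load (and discard into wzr) from the object's address; if the reference is
  // null this faults, and the fault handler turns the fault into a
  // NullPointerException (hence the RecordPcInfo below).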
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet());  // No caller-save registers.
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
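      // Sign-extend from the low `min_size` bytes, e.g. (illustrative) an
      // int-to-byte conversion becomes `sbfx w_out, w_in, #0, #8`.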
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly set 16 as the maximum average number of instructions generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/-1MB), so we set a threshold for the number of HIRs in the graph
  // to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
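  // Illustrative arithmetic: with 4-byte A64 instructions this evaluates to
  // 1MB / (16 * 4) = 16384 HIRs.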

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
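    // Illustrative shape for three entries starting at lower_bound (not the
    // exact emitted code):
    //   subs temp, value, #lower_bound ; b.eq <case 0>
    //   subs temp, temp, #2 ; b.lo <case 1> ; b.eq <case 2>
    //   b <default> (unless the default block falls through)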
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two blocked
    // registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr does not require
    // any blocked registers, so we are free to use both VIXL blocked registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            Register obj,
                                                            uint32_t offset,
                                                            vixl::aarch64::Label* fixup_label,
                                                            bool requires_read_barrier) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (requires_read_barrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
5095 //
5096 // root = obj.field;
5097 // if (Thread::Current()->GetIsGcMarking()) {
5098 // root = ReadBarrier::Mark(root)
5099 // }
5100
5101 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005102 if (fixup_label == nullptr) {
5103 __ Ldr(root_reg, MemOperand(obj, offset));
5104 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005105 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005106 __ Bind(fixup_label);
5107 __ ldr(root_reg, MemOperand(obj, offset));
5108 }
Roland Levillain44015862016-01-22 11:47:17 +00005109 static_assert(
5110 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5111 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5112 "have different sizes.");
5113 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5114 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5115 "have different sizes.");
5116
Vladimir Marko953437b2016-08-24 08:30:46 +00005117 // Slow path marking the GC root `root`.
Roland Levillain44015862016-01-22 11:47:17 +00005118 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005119 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
Roland Levillain44015862016-01-22 11:47:17 +00005120 codegen_->AddSlowPath(slow_path);
5121
5122 MacroAssembler* masm = GetVIXLAssembler();
5123 UseScratchRegisterScope temps(masm);
5124 Register temp = temps.AcquireW();
5125 // temp = Thread::Current()->GetIsGcMarking()
Andreas Gampe542451c2016-07-26 09:02:02 -07005126 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00005127 __ Cbnz(temp, slow_path->GetEntryLabel());
5128 __ Bind(slow_path->GetExitLabel());
5129 } else {
5130 // GC root loaded through a slow path for read barriers other
5131 // than Baker's.
5132 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005133 if (fixup_label == nullptr) {
5134 __ Add(root_reg.X(), obj.X(), offset);
5135 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005136 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005137 __ Bind(fixup_label);
5138 __ add(root_reg.X(), obj.X(), offset);
5139 }
Roland Levillain44015862016-01-22 11:47:17 +00005140 // /* mirror::Object* */ root = root->Read()
5141 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5142 }
5143 } else {
5144 // Plain GC root load with no read barrier.
5145 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005146 if (fixup_label == nullptr) {
5147 __ Ldr(root_reg, MemOperand(obj, offset));
5148 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005149 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005150 __ Bind(fixup_label);
5151 __ ldr(root_reg, MemOperand(obj, offset));
5152 }
Roland Levillain44015862016-01-22 11:47:17 +00005153 // Note that GC roots are not affected by heap poisoning, thus we
5154 // do not have to unpoison `root_reg` here.
5155 }
5156}
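
// Note: a sketch of what the Baker fast path above emits for a GC root
// load; register names are illustrative, not the actual allocation, and
// `#is_gc_marking` stands for Thread::IsGcMarkingOffset:
//
//   ldr wRoot, [xObj, #offset]       // root = *(obj + offset)
//   ldr wTemp, [tr, #is_gc_marking]  // temp = self->is_gc_marking_
//   cbnz wTemp, mark_slow_path       // if marking, root = ReadBarrier::Mark(root)
//   exit: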

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0U;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}
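
// Note: in the array case above, Primitive::ComponentSizeShift(kPrimNot)
// is 2 (heap references are 32-bit compressed pointers), so the address
// computed by the helper is obj + data_offset + (index << 2).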

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // `obj` is unchanged by this operation, but its value now depends
  // on `temp`.
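  // (A 32-bit LDR into a W register zero-extends into the X view, so
  // `temp.X() LSR #32` is always 0: the ADD below adds zero to `obj`
  // while still consuming `temp` as an input, which is what creates
  // the address dependency.)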
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
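
// Note: a sketch of the sequence emitted above for a plain field load
// (offset case, no load-acquire); register names are illustrative, not
// the actual allocation:
//
//   ldr wTemp, [xObj, #monitor_offset]      // Load the lock word.
//   add xObj, xObj, xTemp, lsr #32          // Adds 0; address dependency only.
//   ldr wRef, [xObj, #offset]               // Original reference load.
//   <maybe unpoison wRef>                   // Only if heap poisoning is enabled.
//   tbnz wTemp, #rb_state_shift, mark_slow  // Gray? Then ref = Mark(ref).
//   exit: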

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
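
// Note: unlike the Baker fast path above, this read barrier is
// unconditional: the code always branches to the slow path, which
// performs the runtime call and then jumps back to the exit label.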

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)),
                      mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}
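
// Note: a sketch of the two sequences emitted above (xIn = class
// pointer, xOut = output; names are illustrative only):
//
//   vtable:  ldr xOut, [xIn, #embedded_vtable_entry_offset]
//
//   IMT:     ldr xOut, [xIn, #imt_ptr_offset]     // Load the ImTable*.
//            ldr xOut, [xOut, #imt_entry_offset]  // Load the ArtMethod*.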


#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art