/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
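// Illustrative arithmetic for the threshold: at num_entries == 7, the compare/jump sequence
// costs about 1.5 * 7 + 3 ~= 13 instructions, while the jump table costs 7 instructions plus
// 7 32-bit literals ~= 14 words, so the table only starts paying off around the threshold.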

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
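
// For example, an HIR "less than" with gt_bias maps to `cc` (aka `lo`), which is false when
// the flags come from an unordered FP compare; a NaN operand therefore makes the comparison
// behave as if the left operand were the greater one.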

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operand used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
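
// A sketch of the layout produced by the helper above, derived from its Store/LoadCPURegList
// calls:
//
//   base[spill_offset]                   : live caller-save core registers
//   base[spill_offset + core_spill_size] : live caller-save FP registers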

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    arm64_codegen->InvokeRuntime(entry_point_offset, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
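
// Each entry placed above is a 32-bit offset of its target block relative to `table_start_`.
// The dispatch sequence that consumes the table lives with the HPackedSwitch code generation
// (outside this excerpt); the expected protocol is to load entry i, sign-extend it and add it
// to the table's start address to form the branch target.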

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary; it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};
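
// For example, if `obj_` lives in W3, the call above dispatches to the marking entrypoint
// slot for register 3 in the current Thread's entrypoint table, and the marked reference
// comes back in W3 with no extra moves (the "compact" convention described above).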

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
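
// An illustrative walk-through, assuming the dex calling convention hands out x1-x7 and d0-d7
// (with x0 carrying the ArtMethod*): for an instance method taking (int, float, long), the
// receiver is assigned w1, the int w2, the float s0 and the long x3, while stack_index_
// advances by 1, 1, 1 and 2 vreg slots respectively, reserving stack space for every argument.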

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}
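
// Passing Primitive::kPrimVoid above asks MoveLocation() to deduce the operand type from the
// source and destination locations themselves, since the parallel move resolver does not
// track a per-move type.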
1021
Alexandre Rames5319def2014-10-23 10:03:10 +01001022void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001023 MacroAssembler* masm = GetVIXLAssembler();
1024 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001025 __ Bind(&frame_entry_label_);
1026
Serban Constantinescu02164b32014-11-13 14:05:07 +00001027 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1028 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001029 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001030 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001031 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001032 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001033 __ Ldr(wzr, MemOperand(temp, 0));
1034 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001035 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001036
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001037 if (!HasEmptyFrame()) {
1038 int frame_size = GetFrameSize();
1039 // Stack layout:
1040 // sp[frame_size - 8] : lr.
1041 // ... : other preserved core registers.
1042 // ... : other preserved fp registers.
1043 // ... : reserved frame space.
1044 // sp[0] : current method.
1045 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001046 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001047 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1048 frame_size - GetCoreSpillSize());
1049 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1050 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001051 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001052}
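
// Illustrative sketch (assumed values, not from the original source): for a non-leaf
// method with a 96-byte frame spilling x20, x21 and lr, the prologue above would emit
// roughly:
//   sub x16, sp, #<reserved_bytes>   // implicit stack-overflow probe...
//   ldr wzr, [x16]                   // ...faults if the guard page is reached
//   str x0, [sp, #-96]!              // store ArtMethod* and allocate the whole frame
//   stp x20, x21, [sp, #72]          // preserved core registers near the top
//   str lr, [sp, #88]                // lr ends up at sp[frame_size - 8]
// The use of x0 as kArtMethodRegister and the concrete offsets are assumptions for
// illustration only.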

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return CPURegList(CPURegister::kRegister, kXRegSize, core_spill_mask_);
}

CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return CPURegList(CPURegister::kFPRegister, kDRegSize, fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::aarch64::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
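
// Illustrative note (not from the original source): the emitted sequence is roughly
//   cbz  w_value, done                       // skip the card mark for null stores
//   ldr  x_card, [tr, #<card_table_offset>]  // biased card-table base from the Thread
//   lsr  w_temp, w_object, #kCardShift       // card index of the written object
//   strb w_card, [x_card, x_temp]            // dirty the card
// The byte stored is the low byte of the biased card-table base, which the runtime
// arranges to equal the dirty-card marker, so no extra immediate load is needed.
// Register names and the offset are placeholders for illustration.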

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //   lr       : Runtime reserved.
  //   tr       : Runtime reserved.
  //   xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //   ip1      : VIXL core temp.
  //   ip0      : VIXL core temp.
  //
  // Blocked fp registers:
  //   d31      : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsZeroBitPattern()) {
        temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
      } else {
        if (src_cst->IsIntConstant()) {
          temp = temps.AcquireW();
        } else if (src_cst->IsLongConstant()) {
          temp = temps.AcquireX();
        } else if (src_cst->IsFloatConstant()) {
          temp = temps.AcquireS();
        } else {
          DCHECK(src_cst->IsDoubleConstant());
          temp = temps.AcquireD();
        }
        MoveConstant(temp, src_cst);
      }
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}
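
// Illustrative examples (assumed registers and offsets, not from the original source)
// of what MoveLocation resolves to:
//   stack slot [sp, #16] -> w0         : ldr  w0, [sp, #16]
//   float zero constant  -> [sp, #12]  : str  wzr, [sp, #12]  // zero bit patterns
//                                                             // reuse wzr/xzr directly
//   d0 -> x2 (bitwise, kPrimDouble)    : fmov x2, d0
//   stack -> stack (64bit)             : ldr  d31, [sp, #24]
//                                        str  d31, [sp, #40]  // via the VIXL fp temp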

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
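
// Illustrative sketch (assumed registers and offsets, not from the original source):
// an acquire load of a volatile double at offset 24 from the object in x1 becomes
//   add  x16, x1, #24    // ldar only accepts a plain base register, so form the
//                        // address first
//   ldar x17, [x16]      // acquire load through a core register
//   fmov d0, x17         // then move the bits into the FP destination
// There is no FP variant of ldar, which is why the value bounces through a core temp.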

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.GetBaseRegister(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      Register temp_src;
      if (src.IsZero()) {
        // The zero register is used to avoid synthesizing zero constants.
        temp_src = Register(src);
      } else {
        DCHECK(src.IsFPRegister());
        temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        __ Fmov(temp_src, FPRegister(src));
      }

      __ Stlr(temp_src, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
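
// Illustrative sketch (assumed registers and offsets, not from the original source):
// a release store of a volatile float in s0 at offset 20 from the object in x1 becomes
//   add  x16, x1, #20
//   fmov w17, s0         // no FP variant of stlr, so move the bits to a core temp
//   stlr w17, [x16]
// When the value is a zero constant, the fmov and the temp are skipped and wzr/xzr is
// stored directly.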

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                             HInstruction* instruction,
                                                             SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
}
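
// Illustrative note (not from the original source): quick entrypoints live in a table
// hanging off the Thread object, so every runtime call is an indirect call through lr:
//   ldr lr, [tr, #<entry_point_offset>]   // tr holds the current Thread*
//   blr lr
// RecordPcInfo then maps the return address to its dex pc so the runtime can walk the
// stack; the offset shown is a placeholder.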

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
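
// Illustrative sketch (assumed registers and offsets, not from the original source):
//   add  w16, w_class, #<status_offset>
//   ldar w16, [x16]                      // acquire-load the class status
//   cmp  w16, #kStatusInitialized
//   b.lt <slow path>                     // not yet initialized: call the runtime
// The acquire load is what the comment above refers to: a class observed as
// initialized must also be observed with its initialized static fields.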

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}
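
// Illustrative mapping (not from the original source) of the emitted instructions:
//   kAnyAny / kAnyStore -> dmb ish     (full barrier)
//   kLoadAny            -> dmb ishld   (orders loads against later accesses)
//   kStoreStore         -> dmb ishst   (orders stores against later stores)
// All of them use the inner-shareable domain, which covers the agents that can share
// Java heap memory.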

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
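
// Illustrative sketch (not from the original source) for the common case with no
// explicit successor:
//   ldrh w16, [tr, #<thread_flags_offset>]   // 16-bit thread state/flags
//   cbnz w16, <slow path>                    // any pending flag: call the runtime
//   <return label>
// Loop back edges use the inverted cbz form so the branch to the slow path is the
// fall-through; the offset is a placeholder.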

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}
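
// Illustrative sketch (assumed registers and offsets, not from the original source):
// a non-volatile store of a reference in w2 to a field at offset 8 of the object in
// x1, with heap poisoning disabled, is just
//   str w2, [x1, #8]
// followed by the MarkGCCard sequence shown earlier. With heap poisoning enabled, the
// reference is first copied into a scratch register and poisoned there, so the value
// register stays intact for the card mark.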

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in a register of the same size as the result.
          // If we are rotating a long and the shift distance arrives in a W register,
          // no sxtw is needed to use it as an X register, because shift distances are
          // always masked with (reg_bits - 1).
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.GetImmediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
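
// Illustrative note (not from the original source): Java defines shift distances
// modulo the operand width, which matches AArch64 semantics, so no explicit masking
// instruction is needed. For example, `x << 33` on an int compiles to
//   lsl w0, w1, #1        // 33 & kMaxIntShiftDistance (31) == 1
// and the register forms (lslv/asrv/lsrv) mask the distance in hardware.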

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
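
// Illustrative note (not from the original source): these instructions fold the
// negation of the right-hand side into the operation, so `a & ~b` takes one
// instruction instead of two:
//   bic dst, lhs, rhs   // dst = lhs & ~rhs
//   orn dst, lhs, rhs   // dst = lhs | ~rhs
//   eon dst, lhs, rhs   // dst = lhs ^ ~rhs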

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if one were passed, by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
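
// Illustrative note (not from the original source): this instruction exists so that a
// shift or extension feeding a data-processing operation can ride along for free in
// the shifter operand. For example, `a + (b << 5)` becomes a single instruction
//   add w0, w1, w2, lsl #5
// instead of a separate lsl followed by an add, and `a + (long) b_int` can use
//   add x0, x1, w2, sxtw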

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
    HIntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}
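
// Illustrative note (not from the original source): HIntermediateAddress hoists the
// constant part of an array address out of a loop. An access to a[i] on an int array
// splits `base + data_offset + (i << 2)` into
//   add x16, x_array, #<data_offset>     // loop-invariant, emitted once
//   ldr w0, [x16, x_i, lsl #2]           // per-iteration access
// Register names and the data offset are placeholders for illustration.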

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::aarch64::CodeBufferCheckScope scope(masm,
                                                kInstructionSize,
                                                vixl::aarch64::CodeBufferCheckScope::kCheck,
                                                vixl::aarch64::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
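
// Illustrative note (not from the original source): the merged forms are
//   madd res, l, r, acc   // res = acc + l * r
//   msub res, l, r, acc   // res = acc - l * r
//   mneg res, l, r        // res = -(l * r), the msub special case with acc == 0
// On cores needing the 835769 fixup, a nop separates a 64-bit multiply-accumulate
// from an immediately preceding load or store, e.g.:
//   ldr  x3, [x4]
//   nop
//   madd x0, x1, x2, x3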

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Roland Levillain44015862016-01-22 11:47:17 +00002119 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2120 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002121 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002122 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002123 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002124 // The read barrier instrumentation does not support the
2125 // HIntermediateAddress instruction yet.
2126 DCHECK(!kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00002127 // We do not need to compute the intermediate address from the array: the
2128 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002129 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002130 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002131 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002132 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2133 }
2134 temp = obj;
2135 } else {
2136 __ Add(temp, obj, offset);
2137 }
2138 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2139 }
2140
2141 codegen_->Load(type, OutputCPURegister(instruction), source);
2142 codegen_->MaybeRecordImplicitNullCheck(instruction);
2143
2144 if (type == Primitive::kPrimNot) {
2145 static_assert(
2146 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2147 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2148 Location obj_loc = locations->InAt(0);
2149 if (index.IsConstant()) {
2150 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2151 } else {
2152 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2153 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002154 }
Roland Levillain4d027112015-07-01 15:41:14 +01002155 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002156}
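
// For reference, a rough sketch of the general-case sequences emitted above
// for an int[] load, with read barriers disabled (the registers and the
// scratch register x16 are illustrative assumptions):
//   constant index i:  ldr w0, [x1, #(data_offset + (i << 2))]
//   register index:    add x16, x1, #data_offset
//                      ldr w0, [x16, x2, lsl #2]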

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
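
// For reference, this reduces to a single load of the length field, e.g.
// `ldr w0, [x1, #8]` (illustrative; the exact offset comes from the
// mirror::Array layout and is assumed here to sit right after the object
// header).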

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HIntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(needs_write_barrier);
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::aarch64::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
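
// For reference, a rough sketch of the fast path of a reference array store,
// assuming heap poisoning and read barriers are disabled (registers are
// illustrative):
//   <component-type check of value->klass_ against the array, see above>
//   add  x16, x1, #data_offset
//   str  w2, [x16, x3, lsl #2]      // array[index] = value
//   <MarkGCCard: dirty the card covering the array for the concurrent GC>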

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

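  // Note that a single unsigned comparison covers both failure modes: with
  // `hs` (unsigned >=), a negative index wraps around to a large unsigned
  // value, so both `index < 0` and `index >= length` reach the slow path.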
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
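
// For reference, the integer case above maps the NZCV flags to {-1, 0, +1}
// in two instructions after the compare (a rough sketch, registers
// illustrative):
//   cmp  w1, w2
//   cset w0, ne          // w0 = (w1 != w2) ? 1 : 0
//   cneg w0, w0, lt      // w0 = (w1 < w2) ? -w0 : w0, i.e. -1 / 0 / +1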

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
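
// For reference, a worked example of the division path above: for
// `dividend / 4` (abs_imm == 4, ctz_imm == 2) it emits, roughly:
//   add  temp, dividend, #3         // bias negative dividends so that the
//   cmp  dividend, #0               // arithmetic shift rounds toward zero
//   csel out, temp, dividend, lt    // rather than toward negative infinity
//   asr  out, out, #2
// The remainder path similarly biases negative dividends by `abs_imm - 1`,
// masks the low bits, then removes the bias, so the result keeps the sign of
// the dividend as Java semantics require.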

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
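
// For reference, this is the classic multiply-by-magic-number division (see
// Hacker's Delight, chapter 10): the quotient comes from the high half of a
// widening multiply by a precomputed reciprocal, followed by a shift and a
// sign correction. A rough sketch for a 32-bit division by 7 (the magic value
// 0x92492493 and shift 2 are assumed here; CalculateMagicAndShiftForDivRem
// computes them):
//   mov   w16, #0x92492493
//   smull x16, w0, w16
//   lsr   x16, x16, #32             // temp = high32(dividend * magic)
//   add   w16, w16, w0              // magic < 0 and imm > 0: add dividend
//   asr   w16, w16, #2              // shift
//   sub   w0, w16, w16, asr #31     // quotient += 1 if temp is negative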

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}
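
// For reference, AArch64 has no integer remainder instruction, so the
// non-constant `rem` case above uses `rem = dividend - quotient * divisor`
// (a rough sketch, registers illustrative):
//   sdiv w16, w0, w1                // w16 = w0 / w1
//   msub w0, w16, w1, w0            // w0  = w0 - w16 * w1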

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  vixl::aarch64::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to
  // `true_target` has already been emitted above (shared with case 2), so we
  // only need the unconditional jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
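
// For reference, the compare-against-zero shortcuts above save the explicit
// `cmp` that the general path would emit (a rough sketch, registers
// illustrative):
//   if (x == 0)  =>  cbz  w0, target
//   if (x != 0)  =>  cbnz w0, target
//   if (x < 0)   =>  tbnz w0, #31, target   // test the sign bit
//   if (x >= 0)  =>  tbz  w0, #31, target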

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}
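
// For reference, HSelect lowers to a branchless conditional select. A rough
// sketch for `res = (a < b) ? t : f` with all values in registers
// (illustrative):
//   cmp  w1, w2
//   csel w0, w3, w4, lt              // or fcsel for floating-point values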

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003179 case TypeCheckKind::kExactCheck: {
3180 __ Cmp(out, cls);
3181 __ Cset(out, eq);
3182 if (zero.IsLinked()) {
3183 __ B(&done);
3184 }
3185 break;
3186 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003187
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003188 case TypeCheckKind::kAbstractClassCheck: {
3189 // If the class is abstract, we eagerly fetch the super class of the
3190 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003191 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003192 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003193 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003194 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003195 // If `out` is null, we use it for the result, and jump to `done`.
3196 __ Cbz(out, &done);
3197 __ Cmp(out, cls);
3198 __ B(ne, &loop);
3199 __ Mov(out, 1);
3200 if (zero.IsLinked()) {
3201 __ B(&done);
3202 }
3203 break;
3204 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003205
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003206 case TypeCheckKind::kClassHierarchyCheck: {
3207 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003208 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003209 __ Bind(&loop);
3210 __ Cmp(out, cls);
3211 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003212 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003213 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003214 __ Cbnz(out, &loop);
3215 // If `out` is null, we use it for the result, and jump to `done`.
3216 __ B(&done);
3217 __ Bind(&success);
3218 __ Mov(out, 1);
3219 if (zero.IsLinked()) {
3220 __ B(&done);
3221 }
3222 break;
3223 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003224
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003225 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003226 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003227 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003228 __ Cmp(out, cls);
3229 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003230 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003231 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003232 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003233 // If `out` is null, we use it for the result, and jump to `done`.
3234 __ Cbz(out, &done);
3235 __ Ldrh(out, HeapOperand(out, primitive_offset));
3236 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3237 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003238 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003239 __ Mov(out, 1);
3240 __ B(&done);
3241 break;
3242 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003243
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003244 case TypeCheckKind::kArrayCheck: {
3245 __ Cmp(out, cls);
3246 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003247 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3248 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003249 codegen_->AddSlowPath(slow_path);
3250 __ B(ne, slow_path->GetEntryLabel());
3251 __ Mov(out, 1);
3252 if (zero.IsLinked()) {
3253 __ B(&done);
3254 }
3255 break;
3256 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003257
Calin Juravle98893e12015-10-02 21:05:03 +01003258 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003259 case TypeCheckKind::kInterfaceCheck: {
3260 // Note that we indeed only call on slow path, but we always go
3261 // into the slow path for the unresolved and interface check
3262 // cases.
3263 //
3264 // We cannot directly call the InstanceofNonTrivial runtime
3265 // entry point without resorting to a type checking slow path
3266 // here (i.e. by calling InvokeRuntime directly), as it would
3267 // require to assign fixed registers for the inputs of this
3268 // HInstanceOf instruction (following the runtime calling
3269 // convention), which might be cluttered by the potential first
3270 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003271 //
3272 // TODO: Introduce a new runtime entry point taking the object
3273 // to test (instead of its class) as argument, and let it deal
3274 // with the read barrier issues. This will let us refactor this
3275 // case of the `switch` code as it was previously (with a direct
3276 // call to the runtime not using a type checking slow path).
3277 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003278 DCHECK(locations->OnlyCallsOnSlowPath());
3279 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3280 /* is_fatal */ false);
3281 codegen_->AddSlowPath(slow_path);
3282 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003283 if (zero.IsLinked()) {
3284 __ B(&done);
3285 }
3286 break;
3287 }
3288 }
3289
3290 if (zero.IsLinked()) {
3291 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003292 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003293 }
3294
3295 if (done.IsLinked()) {
3296 __ Bind(&done);
3297 }
3298
3299 if (slow_path != nullptr) {
3300 __ Bind(slow_path->GetExitLabel());
3301 }
3302}
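
// For reference, the kExactCheck path above compiles down to a short
// straight-line sequence. A minimal sketch (register names chosen arbitrarily
// for illustration; class_offset comes from mirror::Object::ClassOffset(), and
// poisoning/read barrier work is elided):
//
//   cbz  w0, .Lzero         // result is 0 for a null `obj` (only if needed)
//   ldr  w3, [w0, #klass]   // w3 = obj->klass_
//   cmp  w3, w2             // compare with the class to check against
//   cset w3, eq             // w3 = (klass == cls) ? 1 : 0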

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid the null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // /* HeapReference<Class> */ temp = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to the slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ Cbnz(temp, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before that, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before that, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::aarch64::Label check_non_primitive_component_type;
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ Cbnz(temp, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before that, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbz(temp, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require us to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}
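
// Note on the slow path above: for the first four check kinds outside of a
// try/catch, a failing comparison means the cast is genuinely invalid, so the
// slow path is created as fatal (it only throws and never returns);
// kArrayCheck, kUnresolvedCheck and kInterfaceCheck always get the returning,
// non-fatal form of TypeCheckSlowPathARM64.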

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though
  // future collectors may not).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
      MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
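
// For reference, the interface dispatch above is roughly the following chain
// (sketch only; register choices and symbolic offsets are illustrative, and
// the implicit null check and unpoisoning are elided):
//
//   mov  ip1, #dex_method_index   // hidden argument for the conflict trampoline
//   ldr  w16, [x1, #klass]        // temp = receiver->klass_
//   ldr  x16, [x16, #imt_ptr]     // temp = klass->imt_
//   ldr  x16, [x16, #imt_offset]  // temp = imt_[ImtIndex(method)]
//   ldr  lr,  [x16, #entry_point] // lr = temp->entry_point_from_quick_compiled_code_
//   blr  lr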

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
      uint32_t element_offset = invoke->GetDexCacheArrayOffset();
      vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(ldr_label);
        __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
      }
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::aarch64::Label* label = &relative_call_patches_.back().label;
      SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(label);
      __ bl(0);  // Branch and link to itself. This will be overridden at link time.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}
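
// The kDexCachePcRelative case above emits the standard AArch64 two-instruction
// PC-relative addressing idiom; both instructions are bound to patch labels
// and carry placeholder immediates until link time. Schematically (symbolic
// operands, not real encodings):
//
//   adrp xN, <page of dex cache array slot>      // patched: 4KiB page address
//   ldr  xN, [xN, <offset of slot within page>]  // patched: in-page offset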

void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though
  // future collectors may not).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}
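
// Virtual dispatch above is the same dependent-load chain as the interface
// path, minus the hidden argument, indexing into the class's embedded vtable
// instead of the IMT (sketch only; registers and symbolic offsets are
// illustrative):
//
//   ldr  w16, [x1, #klass]          // temp = receiver->klass_
//   ldr  x16, [x16, #vtable_entry]  // temp = klass->embedded_vtable_[vtable_index]
//   ldr  lr,  [x16, #entry_point]   // lr = method->entry_point_from_quick_compiled_code_
//   blr  lr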

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    uint32_t string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    uint32_t type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file,
    uint32_t element_offset,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    vixl::aarch64::Label* adrp_label,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::aarch64::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}
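
// Callers therefore invoke NewPcRelative*Patch twice per access: first with a
// null adrp_label to create the ADRP patch (whose pc_insn_label points at
// itself), then again with that label to create the dependent ADD/LDR patch,
// so the linker can compute the final page and page-offset pair from a single
// anchor PC.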

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
    const DexFile& dex_file, uint32_t string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
    const DexFile& dex_file, uint32_t type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
    uint64_t address) {
  bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
  Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
    uint64_t address) {
  return DeduplicateUint64Literal(address);
}

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      boot_image_string_patches_.size() +
      pc_relative_string_patches_.size() +
      boot_image_type_patches_.size() +
      pc_relative_type_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::aarch64::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::aarch64::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->GetLocation(),
                                                              info.offset_or_index));
  }
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
                                                       target_string.dex_file,
                                                       target_string.string_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
                                                               &info.target_dex_file,
                                                               info.pc_insn_label->GetLocation(),
                                                               info.offset_or_index));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
                                                     target_type.dex_file,
                                                     target_type.type_index));
  }
  for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
                                                             &info.target_dex_file,
                                                             info.pc_insn_label->GetLocation(),
                                                             info.offset_or_index));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    vixl::aarch64::Literal<uint32_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
  }
}
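
// Summary of the patch kinds collected above: literal-pool entries yield
// MethodPatch, CodePatch, StringPatch and TypePatch records keyed by the
// literal's offset in the pool; PC-relative ADRP/ADD/LDR instructions yield
// Relative*Patch and DexCacheArrayPatch records keyed by the instruction
// location plus the anchor (ADRP) location; boot image addresses are only
// position-recorded, and only when patch information is requested.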

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(
    uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
}
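
// All of the Deduplicate* helpers share one pattern: the map's GetOrCreate
// takes the key and a factory lambda, so a literal-pool entry is created at
// most once per distinct value and every later request for the same value
// reuses the existing vixl literal.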
3915
Scott Wakeling97c72b72016-06-24 16:19:36 +01003916vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003917 MethodReference target_method,
3918 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003919 return map->GetOrCreate(
3920 target_method,
3921 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003922}
3923
Scott Wakeling97c72b72016-06-24 16:19:36 +01003924vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003925 MethodReference target_method) {
3926 return DeduplicateMethodLiteral(target_method, &method_patches_);
3927}
3928
Scott Wakeling97c72b72016-06-24 16:19:36 +01003929vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003930 MethodReference target_method) {
3931 return DeduplicateMethodLiteral(target_method, &call_patches_);
3932}
3933
3934
Andreas Gampe878d58c2015-01-15 23:24:00 -08003935void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003936 // Explicit clinit checks triggered by static invokes must have been pruned by
3937 // art::PrepareForRegisterAllocation.
3938 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003939
Andreas Gampe878d58c2015-01-15 23:24:00 -08003940 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3941 return;
3942 }
3943
Alexandre Ramesd921d642015-04-16 15:07:16 +01003944 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003945 LocationSummary* locations = invoke->GetLocations();
3946 codegen_->GenerateStaticOrDirectCall(
3947 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003948 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003949}
3950
3951void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003952 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3953 return;
3954 }
3955
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003956 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003957 DCHECK(!codegen_->IsLeafMethod());
3958 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3959}
3960
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003961HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3962 HLoadClass::LoadKind desired_class_load_kind) {
3963 if (kEmitCompilerReadBarrier) {
3964 switch (desired_class_load_kind) {
3965 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3966 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3967 case HLoadClass::LoadKind::kBootImageAddress:
3968 // TODO: Implement for read barrier.
3969 return HLoadClass::LoadKind::kDexCacheViaMethod;
3970 default:
3971 break;
3972 }
3973 }
3974 switch (desired_class_load_kind) {
3975 case HLoadClass::LoadKind::kReferrersClass:
3976 break;
3977 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3978 DCHECK(!GetCompilerOptions().GetCompilePic());
3979 break;
3980 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3981 DCHECK(GetCompilerOptions().GetCompilePic());
3982 break;
3983 case HLoadClass::LoadKind::kBootImageAddress:
3984 break;
3985 case HLoadClass::LoadKind::kDexCacheAddress:
3986 DCHECK(Runtime::Current()->UseJitCompilation());
3987 break;
3988 case HLoadClass::LoadKind::kDexCachePcRelative:
3989 DCHECK(!Runtime::Current()->UseJitCompilation());
3990 break;
3991 case HLoadClass::LoadKind::kDexCacheViaMethod:
3992 break;
3993 }
3994 return desired_class_load_kind;
3995}
3996
Alexandre Rames67555f72014-11-18 10:55:16 +00003997void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003998 if (cls->NeedsAccessCheck()) {
3999 InvokeRuntimeCallingConvention calling_convention;
4000 CodeGenerator::CreateLoadClassLocationSummary(
4001 cls,
4002 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01004003 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004004 /* code_generator_supports_read_barrier */ true);
4005 return;
4006 }
4007
4008 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
4009 ? LocationSummary::kCallOnSlowPath
4010 : LocationSummary::kNoCall;
4011 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
4012 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4013 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
4014 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4015 locations->SetInAt(0, Location::RequiresRegister());
4016 }
4017 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004018}
4019
4020void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004021 if (cls->NeedsAccessCheck()) {
4022 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
4023 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
4024 cls,
4025 cls->GetDexPc(),
4026 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004027 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004028 return;
4029 }
4030
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004031 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004032 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004033
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004034 bool generate_null_check = false;
4035 switch (cls->GetLoadKind()) {
4036 case HLoadClass::LoadKind::kReferrersClass: {
4037 DCHECK(!cls->CanCallRuntime());
4038 DCHECK(!cls->MustGenerateClinitCheck());
4039 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4040 Register current_method = InputRegisterAt(cls, 0);
4041 GenerateGcRootFieldLoad(
4042 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4043 break;
4044 }
4045 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4046 DCHECK(!kEmitCompilerReadBarrier);
4047 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4048 cls->GetTypeIndex()));
4049 break;
4050 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4051 DCHECK(!kEmitCompilerReadBarrier);
4052 // Add ADRP with its PC-relative type patch.
4053 const DexFile& dex_file = cls->GetDexFile();
4054 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004055 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004056 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004057 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004058 __ Bind(adrp_label);
4059 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004060 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004061 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004062 vixl::aarch64::Label* add_label =
4063 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004064 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004065 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004066 __ Bind(add_label);
4067 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004068 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004069 break;
4070 }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // LDR immediate has a 12-bit offset multiplied by the size, and for 32-bit loads
      // that gives a 16KiB range. To reduce the number of literals when we load
      // multiple types, simply split the dex cache address into a 16KiB-aligned base
      // loaded from a literal and the remaining offset embedded in the load.
      static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
      DCHECK_ALIGNED(cls->GetAddress(), 4u);
      constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
      uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
      uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
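      // Worked example with a hypothetical address: for cls->GetAddress() ==
      // 0x72345678 and offset_bits == 14, base_address == 0x72344000 and
      // offset == 0x1678; all types whose roots fall in the same 16KiB window
      // share one base literal.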
      __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
      GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = cls->GetDexFile();
      uint32_t element_offset = cls->GetDexCacheElementOffset();
      vixl::aarch64::Label* adrp_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      MemberOffset resolved_types_offset =
          ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
      // /* GcRoot<mirror::Class>[] */ out =
      //     current_method.ptr_sized_fields_->dex_cache_resolved_types_
      Register current_method = InputRegisterAt(cls, 0);
      __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
      generate_null_check = !cls->IsInDexCache();
      break;
    }
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
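
// Hedged summary of the code shapes chosen above (derived from the switch, one
// line per load kind):
//   kReferrersClass              -> GC-root load from the caller's declaring class.
//   kBootImageLinkTimeAddress    -> literal-pool load patched at link time.
//   kBootImageLinkTimePcRelative -> ADRP + ADD pair with PC-relative patches.
//   kBootImageAddress            -> 32-bit literal holding the known address.
//   kDexCacheAddress             -> literal base plus embedded-offset GC-root load.
//   kDexCachePcRelative          -> ADRP + patched LDR GC-root load.
//   kDexCacheViaMethod           -> walk current_method's resolved-types array.
// Kinds that read the dex cache may still yield null and then fall through to
// the slow path above.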

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  if (kEmitCompilerReadBarrier) {
    switch (desired_string_load_kind) {
      case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      case HLoadString::LoadKind::kBootImageAddress:
        // TODO: Implement for read barrier.
        return HLoadString::LoadKind::kDexCacheViaMethod;
      default:
        break;
    }
  }
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Register out = OutputRegister(load);

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!kEmitCompilerReadBarrier);
      __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
                                                              load->GetStringIndex()));
      return;  // No dex cache slow path.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(!kEmitCompilerReadBarrier);
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t string_index = load->GetStringIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add ADD with its PC-relative String patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
      {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(add_label);
        __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
      }
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
      return;  // No dex cache slow path.
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
  codegen_->AddSlowPath(slow_path);
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
                              ? QUICK_ENTRY_POINT(pLockObject)
                              : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}
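
// A minimal sketch of the StringFactory path above, under the assumption that
// the pNewEmptyString TLS slot holds the StringFactory ArtMethod* (register
// names are illustrative):
//   ldr x1, [tr, #pNewEmptyString]   // ArtMethod* from the thread's entrypoint table.
//   ldr lr, [x1, #code_offset]       // Its quick-compiled entry point.
//   blr lr                           // Call; the result is the new String reference.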

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
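
// Hedged sketches of the integral cases above (not emitted verbatim; register
// names are illustrative):
//   long -> int:   mov  w0, w1           // Truncation: writing W clears bits 63:32.
//   int -> short:  sbfx w0, w1, #0, #16  // Sign-extend the low 16 bits.
//   int -> char:   ubfx w0, w1, #0, #16  // Zero-extend: char is unsigned 16-bit.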

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly set 16 as the maximum average number of assembly instructions generated
  // per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/- 1MB), so we set a threshold for the number of HIRs in the
  // graph to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
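  // With 4-byte A64 instructions this budgets 64 bytes per HIR, so the
  // threshold above evaluates to 1 MiB / 64 B == 16384 HIRs; a rough
  // back-of-the-envelope bound, not a measured one.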

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there
    // are two blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, Adr does not require any blocked registers
    // to encode its immediate value, so we are free to use both VIXL blocked registers
    // to reduce register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}
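
// A sketch of the jump-table path above for lower_bound == 0 (register names
// are illustrative):
//   cmp w1, #num_entries
//   b.hs default_block
//   adr x2, table_start
//   ldr w3, [x2, w1, uxtw #2]   // 32-bit entries, scaled by 4.
//   add x2, x2, w3, sxtw
//   br  x2
// Each table entry holds the signed offset from table_start to its target block.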

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            Register obj,
                                                            uint32_t offset,
                                                            vixl::aarch64::Label* fixup_label) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      if (fixup_label == nullptr) {
        __ Ldr(root_reg, MemOperand(obj, offset));
      } else {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ ldr(root_reg, MemOperand(obj, offset));
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ add(root_reg.X(), obj.X(), offset);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(fixup_label);
      __ ldr(root_reg, MemOperand(obj, offset));
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
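
// Shape of the Baker fast path emitted above (a sketch; operands are
// illustrative):
//   ldr  w_root, [obj, #offset]        // GC root load (patched via fixup_label).
//   ldr  w_tmp, [tr, #is_gc_marking]   // Thread::Current()->GetIsGcMarking().
//   cbnz w_tmp, mark_slow_path         // Out-of-line call to ReadBarrier::Mark.
//  exit:
// When the GC is not marking, the barrier costs one extra load plus an
// untaken branch.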
5096
5097void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5098 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005099 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005100 uint32_t offset,
5101 Register temp,
5102 bool needs_null_check,
5103 bool use_load_acquire) {
5104 DCHECK(kEmitCompilerReadBarrier);
5105 DCHECK(kUseBakerReadBarrier);
5106
5107 // /* HeapReference<Object> */ ref = *(obj + offset)
5108 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005109 size_t no_scale_factor = 0U;
5110 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5111 ref,
5112 obj,
5113 offset,
5114 no_index,
5115 no_scale_factor,
5116 temp,
5117 needs_null_check,
5118 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005119}
5120
5121void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5122 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005123 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005124 uint32_t data_offset,
5125 Location index,
5126 Register temp,
5127 bool needs_null_check) {
5128 DCHECK(kEmitCompilerReadBarrier);
5129 DCHECK(kUseBakerReadBarrier);
5130
5131 // Array cells are never volatile variables, therefore array loads
5132 // never use Load-Acquire instructions on ARM64.
5133 const bool use_load_acquire = false;
5134
Roland Levillainbfea3352016-06-23 13:48:47 +01005135 static_assert(
5136 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5137 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005138 // /* HeapReference<Object> */ ref =
5139 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005140 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.
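  //
  // Note: on ARM64 the "lfence" above is not emitted as an actual
  // barrier instruction; the required load-load ordering is obtained
  // through an artificial address dependency created below, which is
  // cheaper than a memory barrier.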

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // `obj` is unchanged by this operation, but its value now depends
  // on `temp`.
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
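  // The `Add` is a no-op on the value of `obj`: the 32-bit `Ldr` above
  // zero-extends into the X view of `temp`, so `temp.X() >> 32` is
  // always 0. Its sole purpose is the register dependency on `temp`,
  // which forces the hardware to order the two loads.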

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
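        // A64 load/store addressing cannot combine an immediate offset
        // with a scaled register offset in a single instruction, so
        // fold the immediate into a scratch base register first.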
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
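  // If the rb_state bit is set (gray), `Tbnz` enters the slow path,
  // which marks `ref` and jumps back to the exit label; if clear
  // (white), execution falls through with the reference as loaded.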
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

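  // Unlike the Baker fast path above, this read barrier is
  // unconditional: control always branches into the slow path, which
  // performs the runtime call and then resumes at the exit label.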
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
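    // The IMT is a native table referenced by the class, so two loads
    // are needed: first the table pointer out of the class, then the
    // ArtMethod* entry at `method_offset` within that table.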
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art