/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data for a small num_entries.
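// For illustration (approximate, following the estimate above): at the threshold of 7 entries
// the compare/jump sequence costs roughly 1.5 * 7 + 3 ~= 14 instructions, while the jump table
// costs 7 instructions plus 7 * 4 bytes of inline literals, so the two are comparable; above
// the threshold the table version wins on code size.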
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
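// Worked example (a sketch; it relies on the standard AArch64 behaviour where an unordered
// fcmp result sets the C and V flags): for kCondLT with gt_bias, a NaN operand must not take
// the branch, so `cc`/`lo` (C clear) is used, which is false for the unordered case; without
// gt_bias, `lt` (N != V) is used, which is true for the unordered case, matching the
// `/* unordered */` annotations above.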
114
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000115Location ARM64ReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000116 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
117 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
118 // but we use the exact registers for clarity.
119 if (return_type == Primitive::kPrimFloat) {
120 return LocationFrom(s0);
121 } else if (return_type == Primitive::kPrimDouble) {
122 return LocationFrom(d0);
123 } else if (return_type == Primitive::kPrimLong) {
124 return LocationFrom(x0);
Nicolas Geoffray925e5622015-06-03 12:23:32 +0100125 } else if (return_type == Primitive::kPrimVoid) {
126 return Location::NoLocation();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000127 } else {
128 return LocationFrom(w0);
129 }
130}
131
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operands used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating-point register spills).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
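// Rough sketch of what the helper emits on the save path for live registers {x0, x1} and {d0}
// with spill_offset == 16 (register numbers and offsets are only an example):
//
//   stp x0, x1, [sp, #16]   // core_list, stored as one block starting at spill_offset
//   str d0, [sp, #32]       // fp_list, placed right after the core spill area
//
// The restore path mirrors this with ldp/ldr.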

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check.)
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
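// For illustration only: if three switch targets are bound at code offsets 0x40, 0x68 and 0x90
// and the table itself starts at offset 0x20, the loop above simply emits the three 32-bit
// words 0x20, 0x48 and 0x70 (target minus table start); the packed-switch dispatch sequence
// (emitted elsewhere) loads the selected word and adds it back to the table address before
// branching.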

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, so it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
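// Illustration only (the concrete registers come from InvokeDexCallingConvention, so treat the
// names below as an assumption): for a method taking (int, long, float, double), successive
// GetNextLocation() calls would hand out roughly w1, x2, s0 and d1, while stack_index_ still
// advances by 1 or 2 vreg slots per argument so that stack space is reserved for all of them.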
921
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100922Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +0100923 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100924}
925
CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4 and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

Alexandre Rames3e69f162014-12-10 10:36:50 +00001031void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001032 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001033 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001034}
1035
Alexandre Rames5319def2014-10-23 10:03:10 +01001036void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001037 MacroAssembler* masm = GetVIXLAssembler();
1038 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001039 __ Bind(&frame_entry_label_);
1040
Serban Constantinescu02164b32014-11-13 14:05:07 +00001041 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1042 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001043 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001044 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001045 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
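  // Probe the lowest address this method is allowed to use: if the stack has overflowed, the
  // load below faults, and the runtime's implicit stack-overflow handling is expected to turn
  // the fault into a StackOverflowError. RecordPcInfo below provides the stack map for that pc.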
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001046 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001047 __ Ldr(wzr, MemOperand(temp, 0));
1048 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001049 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001050
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001051 if (!HasEmptyFrame()) {
1052 int frame_size = GetFrameSize();
1053 // Stack layout:
1054 // sp[frame_size - 8] : lr.
1055 // ... : other preserved core registers.
1056 // ... : other preserved fp registers.
1057 // ... : reserved frame space.
1058 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001059
1060 // Save the current method if we need it. Note that we do not
1061 // do this in HCurrentMethod, as the instruction might have been removed
1062 // in the SSA graph.
1063 if (RequiresCurrentMethod()) {
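      // The pre-indexed store both allocates the frame (sp -= frame_size) and saves the
      // current ArtMethod* at the new stack pointer in a single instruction.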
1064 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001065 } else {
1066 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001067 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001068 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001069 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1070 frame_size - GetCoreSpillSize());
1071 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1072 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001073 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001074}
1075
1076void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001077 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001078 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001079 if (!HasEmptyFrame()) {
1080 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001081 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1082 frame_size - FrameEntrySpillSize());
1083 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1084 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001085 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001086 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001087 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001088 __ Ret();
1089 GetAssembler()->cfi().RestoreState();
1090 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001091}
1092
Scott Wakeling97c72b72016-06-24 16:19:36 +01001093CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001094 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001095 return CPURegList(CPURegister::kRegister, kXRegSize,
1096 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001097}
1098
Scott Wakeling97c72b72016-06-24 16:19:36 +01001099CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001100 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1101 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001102 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1103 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001104}
1105
Alexandre Rames5319def2014-10-23 10:03:10 +01001106void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1107 __ Bind(GetLabelOf(block));
1108}
1109
Calin Juravle175dc732015-08-25 15:42:32 +01001110void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1111 DCHECK(location.IsRegister());
1112 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1113}
1114
Calin Juravlee460d1d2015-09-29 04:52:17 +01001115void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1116 if (location.IsRegister()) {
1117 locations->AddTemp(location);
1118 } else {
1119 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1120 }
1121}
1122
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001123void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001124 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001125 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001126 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001127 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001128 if (value_can_be_null) {
1129 __ Cbz(value, &done);
1130 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001131 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001132 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
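  // `card` holds the card table base, which is biased at creation time so that its least
  // significant byte equals the dirty-card marker (see art::gc::accounting::CardTable).
  // The Strb below therefore writes that marker into the card covering `object`, located
  // at card + (object >> kCardShift).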
Serban Constantinescu02164b32014-11-13 14:05:07 +00001133 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001134 if (value_can_be_null) {
1135 __ Bind(&done);
1136 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001137}
1138
David Brazdil58282f42016-01-14 12:45:10 +00001139void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001140 // Blocked core registers:
1141 // lr : Runtime reserved.
1142 // tr : Runtime reserved.
1143 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1144 // ip1 : VIXL core temp.
1145 // ip0 : VIXL core temp.
1146 //
1147 // Blocked fp registers:
1148 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001149 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1150 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001151 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001152 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001153 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001154
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001155 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001156 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001157 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001158 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001159
David Brazdil58282f42016-01-14 12:45:10 +00001160 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001161 // Stubs do not save callee-save floating point registers. If the graph
1162 // is debuggable, we need to deal with these registers differently. For
1163 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001164 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1165 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001166 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001167 }
1168 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001169}
1170
Alexandre Rames3e69f162014-12-10 10:36:50 +00001171size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1172 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1173 __ Str(reg, MemOperand(sp, stack_index));
1174 return kArm64WordSize;
1175}
1176
1177size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1178 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1179 __ Ldr(reg, MemOperand(sp, stack_index));
1180 return kArm64WordSize;
1181}
1182
1183size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1184 FPRegister reg = FPRegister(reg_id, kDRegSize);
1185 __ Str(reg, MemOperand(sp, stack_index));
1186 return kArm64WordSize;
1187}
1188
1189size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1190 FPRegister reg = FPRegister(reg_id, kDRegSize);
1191 __ Ldr(reg, MemOperand(sp, stack_index));
1192 return kArm64WordSize;
1193}
1194
Alexandre Rames5319def2014-10-23 10:03:10 +01001195void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001196 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001197}
1198
1199void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001200 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001201}
1202
Alexandre Rames67555f72014-11-18 10:55:16 +00001203void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001204 if (constant->IsIntConstant()) {
1205 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1206 } else if (constant->IsLongConstant()) {
1207 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1208 } else if (constant->IsNullConstant()) {
1209 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001210 } else if (constant->IsFloatConstant()) {
1211 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1212 } else {
1213 DCHECK(constant->IsDoubleConstant());
1214 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1215 }
1216}
1217
Alexandre Rames3e69f162014-12-10 10:36:50 +00001218
1219static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1220 DCHECK(constant.IsConstant());
1221 HConstant* cst = constant.GetConstant();
1222 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001223 // Null is mapped to a core W register, which we associate with kPrimInt.
1224 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001225 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1226 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1227 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1228}
1229
Calin Juravlee460d1d2015-09-29 04:52:17 +01001230void CodeGeneratorARM64::MoveLocation(Location destination,
1231 Location source,
1232 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001233 if (source.Equals(destination)) {
1234 return;
1235 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001236
1237 // A valid move can always be inferred from the destination and source
1238 // locations. When moving from and to a register, the argument type can be
1239  // used to generate 32bit instead of 64bit moves. In debug mode we also
1240  // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001241 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001242
1243 if (destination.IsRegister() || destination.IsFpuRegister()) {
1244 if (unspecified_type) {
1245 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1246 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001247 (src_cst != nullptr && (src_cst->IsIntConstant()
1248 || src_cst->IsFloatConstant()
1249 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001250  // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001251 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001252 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001253 // If the source is a double stack slot or a 64bit constant, a 64bit
1254 // type is appropriate. Else the source is a register, and since the
1255  // type has not been specified, we choose a 64bit type to force a 64bit
1256 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001257 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001258 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001259 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001260 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1261 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1262 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001263 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1264 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1265 __ Ldr(dst, StackOperandFrom(source));
1266 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001267 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001268 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001269 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001270 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001271 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001272 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001273 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001274 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1275 ? Primitive::kPrimLong
1276 : Primitive::kPrimInt;
1277 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1278 }
1279 } else {
1280 DCHECK(source.IsFpuRegister());
1281 if (destination.IsRegister()) {
1282 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1283 ? Primitive::kPrimDouble
1284 : Primitive::kPrimFloat;
1285 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1286 } else {
1287 DCHECK(destination.IsFpuRegister());
1288 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001289 }
1290 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001291 } else { // The destination is not a register. It must be a stack slot.
1292 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1293 if (source.IsRegister() || source.IsFpuRegister()) {
1294 if (unspecified_type) {
1295 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001296 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001297 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001298 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001299 }
1300 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001301 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1302 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1303 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001304 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001305 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1306 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001307 UseScratchRegisterScope temps(GetVIXLAssembler());
1308 HConstant* src_cst = source.GetConstant();
1309 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001310 if (src_cst->IsZeroBitPattern()) {
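        // Constants whose bit pattern is all zeroes are stored straight from the zero
        // register, avoiding both a scratch register and a materializing move.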
1311 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001312 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001313 if (src_cst->IsIntConstant()) {
1314 temp = temps.AcquireW();
1315 } else if (src_cst->IsLongConstant()) {
1316 temp = temps.AcquireX();
1317 } else if (src_cst->IsFloatConstant()) {
1318 temp = temps.AcquireS();
1319 } else {
1320 DCHECK(src_cst->IsDoubleConstant());
1321 temp = temps.AcquireD();
1322 }
1323 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001324 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001325 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001326 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001327 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001328 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001329 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001330 // There is generally less pressure on FP registers.
1331 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001332 __ Ldr(temp, StackOperandFrom(source));
1333 __ Str(temp, StackOperandFrom(destination));
1334 }
1335 }
1336}
1337
1338void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001339 CPURegister dst,
1340 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001341 switch (type) {
1342 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001343 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001344 break;
1345 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001346 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001347 break;
1348 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001349 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001350 break;
1351 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001352 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001353 break;
1354 case Primitive::kPrimInt:
1355 case Primitive::kPrimNot:
1356 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001357 case Primitive::kPrimFloat:
1358 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001359 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001360 __ Ldr(dst, src);
1361 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001362 case Primitive::kPrimVoid:
1363 LOG(FATAL) << "Unreachable type " << type;
1364 }
1365}
1366
Calin Juravle77520bc2015-01-12 18:45:46 +00001367void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001368 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001369 const MemOperand& src,
1370 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001371 MacroAssembler* masm = GetVIXLAssembler();
1372 BlockPoolsScope block_pools(masm);
1373 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001374 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001375 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001376
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001377 DCHECK(!src.IsPreIndex());
1378 DCHECK(!src.IsPostIndex());
1379
1380 // TODO(vixl): Let the MacroAssembler handle MemOperand.
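  // Load-acquire instructions only accept a base register with no offset, so the effective
  // address is materialized into `temp_base` first and the Ldar* below address [temp_base].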
Scott Wakeling97c72b72016-06-24 16:19:36 +01001381 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001382 MemOperand base = MemOperand(temp_base);
1383 switch (type) {
1384 case Primitive::kPrimBoolean:
1385 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001386 if (needs_null_check) {
1387 MaybeRecordImplicitNullCheck(instruction);
1388 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001389 break;
1390 case Primitive::kPrimByte:
1391 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001392 if (needs_null_check) {
1393 MaybeRecordImplicitNullCheck(instruction);
1394 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001395 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1396 break;
1397 case Primitive::kPrimChar:
1398 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001399 if (needs_null_check) {
1400 MaybeRecordImplicitNullCheck(instruction);
1401 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001402 break;
1403 case Primitive::kPrimShort:
1404 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001405 if (needs_null_check) {
1406 MaybeRecordImplicitNullCheck(instruction);
1407 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001408 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1409 break;
1410 case Primitive::kPrimInt:
1411 case Primitive::kPrimNot:
1412 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001413 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001414 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001415 if (needs_null_check) {
1416 MaybeRecordImplicitNullCheck(instruction);
1417 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001418 break;
1419 case Primitive::kPrimFloat:
1420 case Primitive::kPrimDouble: {
1421 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001422 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001423
1424 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1425 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001426 if (needs_null_check) {
1427 MaybeRecordImplicitNullCheck(instruction);
1428 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001429 __ Fmov(FPRegister(dst), temp);
1430 break;
1431 }
1432 case Primitive::kPrimVoid:
1433 LOG(FATAL) << "Unreachable type " << type;
1434 }
1435}
1436
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001437void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001438 CPURegister src,
1439 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001440 switch (type) {
1441 case Primitive::kPrimBoolean:
1442 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001443 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001444 break;
1445 case Primitive::kPrimChar:
1446 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001447 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001448 break;
1449 case Primitive::kPrimInt:
1450 case Primitive::kPrimNot:
1451 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001452 case Primitive::kPrimFloat:
1453 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001454 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001455 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001456 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001457 case Primitive::kPrimVoid:
1458 LOG(FATAL) << "Unreachable type " << type;
1459 }
1460}
1461
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001462void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1463 CPURegister src,
1464 const MemOperand& dst) {
1465 UseScratchRegisterScope temps(GetVIXLAssembler());
1466 Register temp_base = temps.AcquireX();
1467
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001468 DCHECK(!dst.IsPreIndex());
1469 DCHECK(!dst.IsPostIndex());
1470
1471 // TODO(vixl): Let the MacroAssembler handle this.
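  // As with LoadAcquire, store-release instructions only take a plain base register, so the
  // effective address is computed into `temp_base` first.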
Andreas Gampe878d58c2015-01-15 23:24:00 -08001472 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001473 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001474 MemOperand base = MemOperand(temp_base);
1475 switch (type) {
1476 case Primitive::kPrimBoolean:
1477 case Primitive::kPrimByte:
1478 __ Stlrb(Register(src), base);
1479 break;
1480 case Primitive::kPrimChar:
1481 case Primitive::kPrimShort:
1482 __ Stlrh(Register(src), base);
1483 break;
1484 case Primitive::kPrimInt:
1485 case Primitive::kPrimNot:
1486 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001487 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001488 __ Stlr(Register(src), base);
1489 break;
1490 case Primitive::kPrimFloat:
1491 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001492 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001493 Register temp_src;
1494 if (src.IsZero()) {
1495 // The zero register is used to avoid synthesizing zero constants.
1496 temp_src = Register(src);
1497 } else {
1498 DCHECK(src.IsFPRegister());
1499 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1500 __ Fmov(temp_src, FPRegister(src));
1501 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001502
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001503 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001504 break;
1505 }
1506 case Primitive::kPrimVoid:
1507 LOG(FATAL) << "Unreachable type " << type;
1508 }
1509}
1510
Calin Juravle175dc732015-08-25 15:42:32 +01001511void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1512 HInstruction* instruction,
1513 uint32_t dex_pc,
1514 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001515 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001516 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001517 if (EntrypointRequiresStackMap(entrypoint)) {
1518 RecordPcInfo(instruction, dex_pc, slow_path);
1519 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001520}
1521
Roland Levillaindec8f632016-07-22 17:10:06 +01001522void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1523 HInstruction* instruction,
1524 SlowPathCode* slow_path) {
1525 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001526 GenerateInvokeRuntime(entry_point_offset);
1527}
1528
1529void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001530 BlockPoolsScope block_pools(GetVIXLAssembler());
1531 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1532 __ Blr(lr);
1533}
1534
Alexandre Rames67555f72014-11-18 10:55:16 +00001535void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001536 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001537 UseScratchRegisterScope temps(GetVIXLAssembler());
1538 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001539 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1540
Serban Constantinescu02164b32014-11-13 14:05:07 +00001541 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001542 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1543 __ Add(temp, class_reg, status_offset);
1544 __ Ldar(temp, HeapOperand(temp));
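  // Any status strictly below kStatusInitialized means the class may not be fully initialized
  // yet, so take the slow path. The acquire load above ensures that, once the status reads as
  // initialized, the initializing thread's writes (e.g. to static fields) are visible.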
1545 __ Cmp(temp, mirror::Class::kStatusInitialized);
1546 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001547 __ Bind(slow_path->GetExitLabel());
1548}
Alexandre Rames5319def2014-10-23 10:03:10 +01001549
Roland Levillain44015862016-01-22 11:47:17 +00001550void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001551 BarrierType type = BarrierAll;
1552
1553 switch (kind) {
1554 case MemBarrierKind::kAnyAny:
1555 case MemBarrierKind::kAnyStore: {
1556 type = BarrierAll;
1557 break;
1558 }
1559 case MemBarrierKind::kLoadAny: {
1560 type = BarrierReads;
1561 break;
1562 }
1563 case MemBarrierKind::kStoreStore: {
1564 type = BarrierWrites;
1565 break;
1566 }
1567 default:
1568 LOG(FATAL) << "Unexpected memory barrier " << kind;
1569 }
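  // With the inner-shareable domain below, these barrier types map to `dmb ish`,
  // `dmb ishld` and `dmb ishst` respectively.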
1570 __ Dmb(InnerShareable, type);
1571}
1572
Serban Constantinescu02164b32014-11-13 14:05:07 +00001573void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1574 HBasicBlock* successor) {
1575 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001576 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1577 if (slow_path == nullptr) {
1578 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1579 instruction->SetSlowPath(slow_path);
1580 codegen_->AddSlowPath(slow_path);
1581 if (successor != nullptr) {
1582 DCHECK(successor->IsLoopHeader());
1583 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1584 }
1585 } else {
1586 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1587 }
1588
Serban Constantinescu02164b32014-11-13 14:05:07 +00001589 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1590 Register temp = temps.AcquireW();
1591
Andreas Gampe542451c2016-07-26 09:02:02 -07001592 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
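  // The 16-bit thread flags word is zero when nothing is pending; a non-zero value indicates a
  // suspend or checkpoint request and routes execution to the slow path.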
Serban Constantinescu02164b32014-11-13 14:05:07 +00001593 if (successor == nullptr) {
1594 __ Cbnz(temp, slow_path->GetEntryLabel());
1595 __ Bind(slow_path->GetReturnLabel());
1596 } else {
1597 __ Cbz(temp, codegen_->GetLabelOf(successor));
1598 __ B(slow_path->GetEntryLabel());
1599 // slow_path will return to GetLabelOf(successor).
1600 }
1601}
1602
Alexandre Rames5319def2014-10-23 10:03:10 +01001603InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1604 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001605 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001606 assembler_(codegen->GetAssembler()),
1607 codegen_(codegen) {}
1608
1609#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001610 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001611
1612#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1613
1614enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001615 // Using a base helps identify when we hit such breakpoints.
1616 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001617#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1618 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1619#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1620};
1621
1622#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001623 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001624 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1625 } \
1626 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1627 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1628 locations->SetOut(Location::Any()); \
1629 }
1630 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1631#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1632
1633#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001634#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001635
Alexandre Rames67555f72014-11-18 10:55:16 +00001636void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001637 DCHECK_EQ(instr->InputCount(), 2U);
1638 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1639 Primitive::Type type = instr->GetResultType();
1640 switch (type) {
1641 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001642 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001643 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001644 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001645 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001646 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001647
1648 case Primitive::kPrimFloat:
1649 case Primitive::kPrimDouble:
1650 locations->SetInAt(0, Location::RequiresFpuRegister());
1651 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001652 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001653 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001654
Alexandre Rames5319def2014-10-23 10:03:10 +01001655 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001656 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001657 }
1658}
1659
Alexandre Rames09a99962015-04-15 11:47:56 +01001660void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001661 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1662
1663 bool object_field_get_with_read_barrier =
1664 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001665 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001666 new (GetGraph()->GetArena()) LocationSummary(instruction,
1667 object_field_get_with_read_barrier ?
1668 LocationSummary::kCallOnSlowPath :
1669 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001670 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001671 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01001672 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001673 locations->SetInAt(0, Location::RequiresRegister());
1674 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1675 locations->SetOut(Location::RequiresFpuRegister());
1676 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001677 // The output overlaps for an object field get when read barriers
1678 // are enabled: we do not want the load to overwrite the object's
1679 // location, as we need it to emit the read barrier.
1680 locations->SetOut(
1681 Location::RequiresRegister(),
1682 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001683 }
1684}
1685
1686void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1687 const FieldInfo& field_info) {
1688 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001689 LocationSummary* locations = instruction->GetLocations();
1690 Location base_loc = locations->InAt(0);
1691 Location out = locations->Out();
1692 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001693 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001694 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001695 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001696
Roland Levillain44015862016-01-22 11:47:17 +00001697 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1698 // Object FieldGet with Baker's read barrier case.
1699 MacroAssembler* masm = GetVIXLAssembler();
1700 UseScratchRegisterScope temps(masm);
1701 // /* HeapReference<Object> */ out = *(base + offset)
1702 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1703 Register temp = temps.AcquireW();
1704 // Note that potential implicit null checks are handled in this
1705 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1706 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1707 instruction,
1708 out,
1709 base,
1710 offset,
1711 temp,
1712 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001713 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001714 } else {
1715 // General case.
1716 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001717 // Note that a potential implicit null check is handled in this
1718 // CodeGeneratorARM64::LoadAcquire call.
1719 // NB: LoadAcquire will record the pc info if needed.
1720 codegen_->LoadAcquire(
1721 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001722 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001723 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001724 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001725 }
Roland Levillain44015862016-01-22 11:47:17 +00001726 if (field_type == Primitive::kPrimNot) {
1727 // If read barriers are enabled, emit read barriers other than
1728 // Baker's using a slow path (and also unpoison the loaded
1729 // reference, if heap poisoning is enabled).
1730 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1731 }
Roland Levillain4d027112015-07-01 15:41:14 +01001732 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001733}
1734
1735void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1736 LocationSummary* locations =
1737 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1738 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001739 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1740 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1741 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001742 locations->SetInAt(1, Location::RequiresFpuRegister());
1743 } else {
1744 locations->SetInAt(1, Location::RequiresRegister());
1745 }
1746}
1747
1748void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001749 const FieldInfo& field_info,
1750 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001751 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001752 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001753
1754 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001755 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001756 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001757 Offset offset = field_info.GetFieldOffset();
1758 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001759
Roland Levillain4d027112015-07-01 15:41:14 +01001760 {
1761 // We use a block to end the scratch scope before the write barrier, thus
1762 // freeing the temporary registers so they can be used in `MarkGCCard`.
1763 UseScratchRegisterScope temps(GetVIXLAssembler());
1764
1765 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1766 DCHECK(value.IsW());
1767 Register temp = temps.AcquireW();
1768 __ Mov(temp, value.W());
1769 GetAssembler()->PoisonHeapReference(temp.W());
1770 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001771 }
Roland Levillain4d027112015-07-01 15:41:14 +01001772
1773 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001774 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1775 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001776 } else {
1777 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1778 codegen_->MaybeRecordImplicitNullCheck(instruction);
1779 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001780 }
1781
1782 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001783 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001784 }
1785}
1786
Alexandre Rames67555f72014-11-18 10:55:16 +00001787void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001788 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001789
1790 switch (type) {
1791 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001792 case Primitive::kPrimLong: {
1793 Register dst = OutputRegister(instr);
1794 Register lhs = InputRegisterAt(instr, 0);
1795 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001796 if (instr->IsAdd()) {
1797 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001798 } else if (instr->IsAnd()) {
1799 __ And(dst, lhs, rhs);
1800 } else if (instr->IsOr()) {
1801 __ Orr(dst, lhs, rhs);
1802 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001803 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001804 } else if (instr->IsRor()) {
1805 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001806 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001807 __ Ror(dst, lhs, shift);
1808 } else {
1809  // Ensure the shift distance is in a register of the same size as the result. If
1810  // we are rotating a long and the shift distance originally comes in a W register,
1811  // we do not need to sxtw it for use as an X register, since the shift distance is
1812  // always masked with (reg_bits - 1).
1813 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1814 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001815 } else {
1816 DCHECK(instr->IsXor());
1817 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001818 }
1819 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001820 }
1821 case Primitive::kPrimFloat:
1822 case Primitive::kPrimDouble: {
1823 FPRegister dst = OutputFPRegister(instr);
1824 FPRegister lhs = InputFPRegisterAt(instr, 0);
1825 FPRegister rhs = InputFPRegisterAt(instr, 1);
1826 if (instr->IsAdd()) {
1827 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001828 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001829 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001830 } else {
1831 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001832 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001833 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001834 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001835 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001836 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001837 }
1838}
1839
Serban Constantinescu02164b32014-11-13 14:05:07 +00001840void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1841 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1842
1843 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1844 Primitive::Type type = instr->GetResultType();
1845 switch (type) {
1846 case Primitive::kPrimInt:
1847 case Primitive::kPrimLong: {
1848 locations->SetInAt(0, Location::RequiresRegister());
1849 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1850 locations->SetOut(Location::RequiresRegister());
1851 break;
1852 }
1853 default:
1854 LOG(FATAL) << "Unexpected shift type " << type;
1855 }
1856}
1857
1858void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1859 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1860
1861 Primitive::Type type = instr->GetType();
1862 switch (type) {
1863 case Primitive::kPrimInt:
1864 case Primitive::kPrimLong: {
1865 Register dst = OutputRegister(instr);
1866 Register lhs = InputRegisterAt(instr, 0);
1867 Operand rhs = InputOperandAt(instr, 1);
1868 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001869 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001870 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001871 if (instr->IsShl()) {
1872 __ Lsl(dst, lhs, shift_value);
1873 } else if (instr->IsShr()) {
1874 __ Asr(dst, lhs, shift_value);
1875 } else {
1876 __ Lsr(dst, lhs, shift_value);
1877 }
1878 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001879 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001880
1881 if (instr->IsShl()) {
1882 __ Lsl(dst, lhs, rhs_reg);
1883 } else if (instr->IsShr()) {
1884 __ Asr(dst, lhs, rhs_reg);
1885 } else {
1886 __ Lsr(dst, lhs, rhs_reg);
1887 }
1888 }
1889 break;
1890 }
1891 default:
1892 LOG(FATAL) << "Unexpected shift operation type " << type;
1893 }
1894}
1895
Alexandre Rames5319def2014-10-23 10:03:10 +01001896void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001897 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001898}
1899
1900void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001901 HandleBinaryOp(instruction);
1902}
1903
1904void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1905 HandleBinaryOp(instruction);
1906}
1907
1908void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1909 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001910}
1911
Artem Serov7fc63502016-02-09 17:15:29 +00001912void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001913 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1914 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1915 locations->SetInAt(0, Location::RequiresRegister());
1916 // There is no immediate variant of negated bitwise instructions in AArch64.
1917 locations->SetInAt(1, Location::RequiresRegister());
1918 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1919}
1920
Artem Serov7fc63502016-02-09 17:15:29 +00001921void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001922 Register dst = OutputRegister(instr);
1923 Register lhs = InputRegisterAt(instr, 0);
1924 Register rhs = InputRegisterAt(instr, 1);
1925
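  // These map one-to-one onto A64 negated-operand instructions:
  //   Bic: dst = lhs & ~rhs,  Orn: dst = lhs | ~rhs,  Eon: dst = lhs ^ ~rhs.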
1926 switch (instr->GetOpKind()) {
1927 case HInstruction::kAnd:
1928 __ Bic(dst, lhs, rhs);
1929 break;
1930 case HInstruction::kOr:
1931 __ Orn(dst, lhs, rhs);
1932 break;
1933 case HInstruction::kXor:
1934 __ Eon(dst, lhs, rhs);
1935 break;
1936 default:
1937 LOG(FATAL) << "Unreachable";
1938 }
1939}
1940
Alexandre Rames8626b742015-11-25 16:28:08 +00001941void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1942 HArm64DataProcWithShifterOp* instruction) {
1943 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1944 instruction->GetType() == Primitive::kPrimLong);
1945 LocationSummary* locations =
1946 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1947 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1948 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1949 } else {
1950 locations->SetInAt(0, Location::RequiresRegister());
1951 }
1952 locations->SetInAt(1, Location::RequiresRegister());
1953 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1954}
1955
1956void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1957 HArm64DataProcWithShifterOp* instruction) {
1958 Primitive::Type type = instruction->GetType();
1959 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1960 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1961 Register out = OutputRegister(instruction);
1962 Register left;
1963 if (kind != HInstruction::kNeg) {
1964 left = InputRegisterAt(instruction, 0);
1965 }
1966  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion
1967  // into the shifter operand, the instruction generating `right_reg` (the input to
1968  // the type conversion) can have a different type from this instruction's type,
1969  // so we indicate the type manually.
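  // For example, an int-to-long conversion merged into a long add is emitted as a single
  // extended-register operation of the form `add xd, xn, wm, sxtw`, where `right_reg` (wm)
  // still holds a 32-bit value.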
1970 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001971 int64_t shift_amount = instruction->GetShiftAmount() &
1972 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001973
1974 Operand right_operand(0);
1975
1976 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1977 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1978 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1979 } else {
1980 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1981 }
1982
1983 // Logical binary operations do not support extension operations in the
1984  // operand. Note that VIXL would still manage if one were passed, by generating
1985  // the extension as a separate instruction.
1986 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1987 DCHECK(!right_operand.IsExtendedRegister() ||
1988 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1989 kind != HInstruction::kNeg));
1990 switch (kind) {
1991 case HInstruction::kAdd:
1992 __ Add(out, left, right_operand);
1993 break;
1994 case HInstruction::kAnd:
1995 __ And(out, left, right_operand);
1996 break;
1997 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001998 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001999 __ Neg(out, right_operand);
2000 break;
2001 case HInstruction::kOr:
2002 __ Orr(out, left, right_operand);
2003 break;
2004 case HInstruction::kSub:
2005 __ Sub(out, left, right_operand);
2006 break;
2007 case HInstruction::kXor:
2008 __ Eor(out, left, right_operand);
2009 break;
2010 default:
2011 LOG(FATAL) << "Unexpected operation kind: " << kind;
2012 UNREACHABLE();
2013 }
2014}
2015
Artem Serov328429f2016-07-06 16:23:04 +01002016void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002017 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
2018 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002019 LocationSummary* locations =
2020 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2021 locations->SetInAt(0, Location::RequiresRegister());
2022 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
2023 locations->SetOut(Location::RequiresRegister());
2024}
2025
Roland Levillain4a3aa572016-08-15 13:17:06 +00002026void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
2027 HIntermediateAddress* instruction) {
2028 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
2029 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002030 __ Add(OutputRegister(instruction),
2031 InputRegisterAt(instruction, 0),
2032 Operand(InputOperandAt(instruction, 1)));
2033}
2034
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002035void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002036 LocationSummary* locations =
2037 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002038 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2039 if (instr->GetOpKind() == HInstruction::kSub &&
2040 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002041 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002042  // Don't allocate a register for the Mneg instruction.
2043 } else {
2044 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2045 Location::RequiresRegister());
2046 }
2047 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2048 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002049 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2050}
2051
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002052void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002053 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002054 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2055 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002056
2057 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2058 // This fixup should be carried out for all multiply-accumulate instructions:
2059 // madd, msub, smaddl, smsubl, umaddl and umsubl.
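  // On affected Cortex-A53 parts (erratum 835769), a 64-bit multiply-accumulate that directly
  // follows a load or store can produce an incorrect result; inserting a nop between the two
  // breaks the problematic sequence.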
2060 if (instr->GetType() == Primitive::kPrimLong &&
2061 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2062 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002063 vixl::aarch64::Instruction* prev =
2064 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002065 if (prev->IsLoadOrStore()) {
2066 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002067 vixl::aarch64::CodeBufferCheckScope scope(masm,
2068 kInstructionSize,
2069 vixl::aarch64::CodeBufferCheckScope::kCheck,
2070 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002071 __ nop();
2072 }
2073 }
2074
2075 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002076 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002077 __ Madd(res, mul_left, mul_right, accumulator);
2078 } else {
2079 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002080 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002081 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002082 __ Mneg(res, mul_left, mul_right);
2083 } else {
2084 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2085 __ Msub(res, mul_left, mul_right, accumulator);
2086 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002087 }
2088}
2089
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002090void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002091 bool object_array_get_with_read_barrier =
2092 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002093 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002094 new (GetGraph()->GetArena()) LocationSummary(instruction,
2095 object_array_get_with_read_barrier ?
2096 LocationSummary::kCallOnSlowPath :
2097 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002098 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002099 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01002100 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002101 locations->SetInAt(0, Location::RequiresRegister());
2102 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002103 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2104 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2105 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002106 // The output overlaps in the case of an object array get with
2107 // read barriers enabled: we do not want the move to overwrite the
2108 // array's location, as we need it to emit the read barrier.
2109 locations->SetOut(
2110 Location::RequiresRegister(),
2111 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002112 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002113}
2114
2115void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002116 Primitive::Type type = instruction->GetType();
2117 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002118 LocationSummary* locations = instruction->GetLocations();
2119 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002120 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002121 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002122 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2123 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002124 MacroAssembler* masm = GetVIXLAssembler();
2125 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002126 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002127 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002128
Roland Levillain44015862016-01-22 11:47:17 +00002129 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2130 // Object ArrayGet with Baker's read barrier case.
2131 Register temp = temps.AcquireW();
Roland Levillain4a3aa572016-08-15 13:17:06 +00002132 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
2133 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Roland Levillain44015862016-01-22 11:47:17 +00002134 // Note that a potential implicit null check is handled in the
2135 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2136 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2137 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002138 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002139 // General case.
2140 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002141 Register length;
2142 if (maybe_compressed_char_at) {
2143 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2144 length = temps.AcquireW();
2145 __ Ldr(length, HeapOperand(obj, count_offset));
2146 codegen_->MaybeRecordImplicitNullCheck(instruction);
2147 }
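    // In this scheme the most significant bit of `count` acts as the
    // compression flag (tested with Tbz on bit kWRegSize - 1 below): when it
    // is set the characters are stored as 8-bit values and read with Ldrb,
    // otherwise the usual 16-bit Ldrh is used.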
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002148 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002149 if (maybe_compressed_char_at) {
2150 vixl::aarch64::Label uncompressed_load, done;
2151 __ Tbz(length.W(), kWRegSize - 1, &uncompressed_load);
2152 __ Ldrb(Register(OutputCPURegister(instruction)),
2153 HeapOperand(obj, offset + Int64ConstantFrom(index)));
2154 __ B(&done);
2155 __ Bind(&uncompressed_load);
2156 __ Ldrh(Register(OutputCPURegister(instruction)),
2157 HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
2158 __ Bind(&done);
2159 } else {
2160 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2161 source = HeapOperand(obj, offset);
2162 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002163 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002164 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002165 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002166 // The read barrier instrumentation does not support the
2167 // HIntermediateAddress instruction yet.
2168 DCHECK(!kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00002169 // We do not need to compute the intermediate address from the array: the
2170 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002171 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002172 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002173 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002174 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2175 }
2176 temp = obj;
2177 } else {
2178 __ Add(temp, obj, offset);
2179 }
jessicahandojo05765752016-09-09 19:01:32 -07002180 if (maybe_compressed_char_at) {
2181 vixl::aarch64::Label uncompressed_load, done;
2182 __ Tbz(length.W(), kWRegSize - 1, &uncompressed_load);
2183 __ Ldrb(Register(OutputCPURegister(instruction)),
2184 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2185 __ B(&done);
2186 __ Bind(&uncompressed_load);
2187 __ Ldrh(Register(OutputCPURegister(instruction)),
2188 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2189 __ Bind(&done);
2190 } else {
2191 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2192 }
Roland Levillain44015862016-01-22 11:47:17 +00002193 }
jessicahandojo05765752016-09-09 19:01:32 -07002194 if (!maybe_compressed_char_at) {
2195 codegen_->Load(type, OutputCPURegister(instruction), source);
2196 codegen_->MaybeRecordImplicitNullCheck(instruction);
2197 }
Roland Levillain44015862016-01-22 11:47:17 +00002198
2199 if (type == Primitive::kPrimNot) {
2200 static_assert(
2201 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2202 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2203 Location obj_loc = locations->InAt(0);
2204 if (index.IsConstant()) {
2205 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2206 } else {
2207 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2208 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002209 }
Roland Levillain4d027112015-07-01 15:41:14 +01002210 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002211}
2212
Alexandre Rames5319def2014-10-23 10:03:10 +01002213void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2214 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2215 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002216 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002217}
2218
2219void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002220 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002221 vixl::aarch64::Register out = OutputRegister(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002222 BlockPoolsScope block_pools(GetVIXLAssembler());
jessicahandojo05765752016-09-09 19:01:32 -07002223 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002224 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002225 // Mask out compression flag from String's array length.
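  // The flag occupies the sign bit here, so ANDing with INT32_MAX clears it
  // while leaving the character count untouched.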
2226 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2227 __ And(out.W(), out.W(), Operand(static_cast<int32_t>(INT32_MAX)));
2228 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002229}
2230
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002231void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002232 Primitive::Type value_type = instruction->GetComponentType();
2233
2234 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002235 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2236 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002237 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002238 LocationSummary::kCallOnSlowPath :
2239 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002240 locations->SetInAt(0, Location::RequiresRegister());
2241 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002242 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2243 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2244 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002245 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002246 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002247 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002248 }
2249}
2250
2251void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2252 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002253 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002254 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002255 bool needs_write_barrier =
2256 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002257
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002258 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002259 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002260 CPURegister source = value;
2261 Location index = locations->InAt(1);
2262 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2263 MemOperand destination = HeapOperand(array);
2264 MacroAssembler* masm = GetVIXLAssembler();
2265 BlockPoolsScope block_pools(masm);
2266
2267 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002268 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002269 if (index.IsConstant()) {
2270 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2271 destination = HeapOperand(array, offset);
2272 } else {
2273 UseScratchRegisterScope temps(masm);
2274 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002275 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002276 // The read barrier instrumentation does not support the
2277 // HIntermediateAddress instruction yet.
2278 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002279 // We do not need to compute the intermediate address from the array: the
2280 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002281 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002282 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002283 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002284 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2285 }
2286 temp = array;
2287 } else {
2288 __ Add(temp, array, offset);
2289 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002290 destination = HeapOperand(temp,
2291 XRegisterFrom(index),
2292 LSL,
2293 Primitive::ComponentSizeShift(value_type));
2294 }
2295 codegen_->Store(value_type, value, destination);
2296 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002297 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002298 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002299 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002300 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002301 {
2302 // We use a block to end the scratch scope before the write barrier, thus
2303 // freeing the temporary registers so they can be used in `MarkGCCard`.
2304 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002305 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002306 if (index.IsConstant()) {
2307 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002308 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002309 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002310 destination = HeapOperand(temp,
2311 XRegisterFrom(index),
2312 LSL,
2313 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002314 }
2315
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002316 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2317 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2318 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2319
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002320 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002321 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2322 codegen_->AddSlowPath(slow_path);
2323 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002324 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002325 __ Cbnz(Register(value), &non_zero);
2326 if (!index.IsConstant()) {
2327 __ Add(temp, array, offset);
2328 }
2329 __ Str(wzr, destination);
2330 codegen_->MaybeRecordImplicitNullCheck(instruction);
2331 __ B(&done);
2332 __ Bind(&non_zero);
2333 }
2334
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002335 // Note that when Baker read barriers are enabled, the type
2336 // checks are performed without read barriers. This is fine,
2337 // even in the case where a class object is in the from-space
2338 // after the flip, as a comparison involving such a type would
2339 // not produce a false positive; it may of course produce a
2340 // false negative, in which case we would take the ArraySet
2341 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002342
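      // Fast-path type check: compare the value's class against the array's
      // component type. For an array statically typed as Object[], a mismatch
      // is still accepted without the runtime call when the component type's
      // super class is null (i.e. the component type is java.lang.Object);
      // every other mismatch branches to the slow path.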
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002343 Register temp2 = temps.AcquireSameSizeAs(array);
2344 // /* HeapReference<Class> */ temp = array->klass_
2345 __ Ldr(temp, HeapOperand(array, class_offset));
2346 codegen_->MaybeRecordImplicitNullCheck(instruction);
2347 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002348
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002349 // /* HeapReference<Class> */ temp = temp->component_type_
2350 __ Ldr(temp, HeapOperand(temp, component_offset));
2351 // /* HeapReference<Class> */ temp2 = value->klass_
2352 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2353 // If heap poisoning is enabled, no need to unpoison `temp`
2354 // nor `temp2`, as we are comparing two poisoned references.
2355 __ Cmp(temp, temp2);
2356 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002357
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002358 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2359 vixl::aarch64::Label do_put;
2360 __ B(eq, &do_put);
2361 // If heap poisoning is enabled, the `temp` reference has
2362 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002363 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2364
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002365 // /* HeapReference<Class> */ temp = temp->super_class_
2366 __ Ldr(temp, HeapOperand(temp, super_offset));
2367 // If heap poisoning is enabled, no need to unpoison
2368 // `temp`, as we are comparing against null below.
2369 __ Cbnz(temp, slow_path->GetEntryLabel());
2370 __ Bind(&do_put);
2371 } else {
2372 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002373 }
2374 }
2375
2376 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002377 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002378 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002379 __ Mov(temp2, value.W());
2380 GetAssembler()->PoisonHeapReference(temp2);
2381 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002382 }
2383
2384 if (!index.IsConstant()) {
2385 __ Add(temp, array, offset);
2386 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002387 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002388
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002389 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002390 codegen_->MaybeRecordImplicitNullCheck(instruction);
2391 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002392 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002393
2394 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2395
2396 if (done.IsLinked()) {
2397 __ Bind(&done);
2398 }
2399
2400 if (slow_path != nullptr) {
2401 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002402 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002403 }
2404}
2405
Alexandre Rames67555f72014-11-18 10:55:16 +00002406void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002407 RegisterSet caller_saves = RegisterSet::Empty();
2408 InvokeRuntimeCallingConvention calling_convention;
2409 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2410 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2411 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002412 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002413 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002414}
2415
2416void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002417 BoundsCheckSlowPathARM64* slow_path =
2418 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002419 codegen_->AddSlowPath(slow_path);
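  // A single unsigned comparison covers both failure modes: a negative index
  // appears as a large unsigned value, so the `hs` (unsigned >=) branch below
  // catches index < 0 as well as index >= length.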
Alexandre Rames67555f72014-11-18 10:55:16 +00002420 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2421 __ B(slow_path->GetEntryLabel(), hs);
2422}
2423
Alexandre Rames67555f72014-11-18 10:55:16 +00002424void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2425 LocationSummary* locations =
2426 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2427 locations->SetInAt(0, Location::RequiresRegister());
2428 if (check->HasUses()) {
2429 locations->SetOut(Location::SameAsFirstInput());
2430 }
2431}
2432
2433void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2434 // We assume the class is not null.
2435 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2436 check->GetLoadClass(), check, check->GetDexPc(), true);
2437 codegen_->AddSlowPath(slow_path);
2438 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2439}
2440
Roland Levillain1a653882016-03-18 18:05:57 +00002441static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2442 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2443 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2444}
2445
2446void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2447 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2448 Location rhs_loc = instruction->GetLocations()->InAt(1);
2449 if (rhs_loc.IsConstant()) {
2450 // 0.0 is the only immediate that can be encoded directly in
2451 // an FCMP instruction.
2452 //
2453 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2454 // specify that in a floating-point comparison, positive zero
2455 // and negative zero are considered equal, so we can use the
2456 // literal 0.0 for both cases here.
2457 //
2458 // Note however that some methods (Float.equals, Float.compare,
2459 // Float.compareTo, Double.equals, Double.compare,
2460 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2461 // StrictMath.min) consider 0.0 to be (strictly) greater than
2462 // -0.0. So if we ever translate calls to these methods into a
2463 // HCompare instruction, we must handle the -0.0 case with
2464 // care here.
2465 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2466 __ Fcmp(lhs_reg, 0.0);
2467 } else {
2468 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2469 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002470}
2471
Serban Constantinescu02164b32014-11-13 14:05:07 +00002472void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002473 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002474 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2475 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002476 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002477 case Primitive::kPrimBoolean:
2478 case Primitive::kPrimByte:
2479 case Primitive::kPrimShort:
2480 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002481 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002482 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002483 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002484 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002485 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2486 break;
2487 }
2488 case Primitive::kPrimFloat:
2489 case Primitive::kPrimDouble: {
2490 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002491 locations->SetInAt(1,
2492 IsFloatingPointZeroConstant(compare->InputAt(1))
2493 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2494 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002495 locations->SetOut(Location::RequiresRegister());
2496 break;
2497 }
2498 default:
2499 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2500 }
2501}
2502
2503void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2504 Primitive::Type in_type = compare->InputAt(0)->GetType();
2505
2506 // 0 if: left == right
2507 // 1 if: left > right
2508 // -1 if: left < right
2509 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002510 case Primitive::kPrimBoolean:
2511 case Primitive::kPrimByte:
2512 case Primitive::kPrimShort:
2513 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002514 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002515 case Primitive::kPrimLong: {
2516 Register result = OutputRegister(compare);
2517 Register left = InputRegisterAt(compare, 0);
2518 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002519 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002520 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2521 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002522 break;
2523 }
2524 case Primitive::kPrimFloat:
2525 case Primitive::kPrimDouble: {
2526 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002527 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002528 __ Cset(result, ne);
2529 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002530 break;
2531 }
2532 default:
2533 LOG(FATAL) << "Unimplemented compare type " << in_type;
2534 }
2535}
2536
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002537void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002538 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002539
2540 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2541 locations->SetInAt(0, Location::RequiresFpuRegister());
2542 locations->SetInAt(1,
2543 IsFloatingPointZeroConstant(instruction->InputAt(1))
2544 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2545 : Location::RequiresFpuRegister());
2546 } else {
2547 // Integer cases.
2548 locations->SetInAt(0, Location::RequiresRegister());
2549 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2550 }
2551
David Brazdilb3e773e2016-01-26 11:28:37 +00002552 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002553 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002554 }
2555}
2556
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002557void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002558 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002559 return;
2560 }
2561
2562 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002563 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002564 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002565
Roland Levillain7f63c522015-07-13 15:54:55 +00002566 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002567 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002568 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002569 } else {
2570 // Integer cases.
2571 Register lhs = InputRegisterAt(instruction, 0);
2572 Operand rhs = InputOperandAt(instruction, 1);
2573 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002574 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002575 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002576}
2577
2578#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2579 M(Equal) \
2580 M(NotEqual) \
2581 M(LessThan) \
2582 M(LessThanOrEqual) \
2583 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002584 M(GreaterThanOrEqual) \
2585 M(Below) \
2586 M(BelowOrEqual) \
2587 M(Above) \
2588 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002589#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002590void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2591void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002592FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002593#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002594#undef FOR_EACH_CONDITION_INSTRUCTION
2595
Zheng Xuc6667102015-05-15 16:08:45 +08002596void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2597 DCHECK(instruction->IsDiv() || instruction->IsRem());
2598
2599 LocationSummary* locations = instruction->GetLocations();
2600 Location second = locations->InAt(1);
2601 DCHECK(second.IsConstant());
2602
2603 Register out = OutputRegister(instruction);
2604 Register dividend = InputRegisterAt(instruction, 0);
2605 int64_t imm = Int64FromConstant(second.GetConstant());
2606 DCHECK(imm == 1 || imm == -1);
2607
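  // x % 1 and x % -1 are always 0, while x / 1 == x and x / -1 == -x, so a
  // plain move or negate is sufficient.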
2608 if (instruction->IsRem()) {
2609 __ Mov(out, 0);
2610 } else {
2611 if (imm == 1) {
2612 __ Mov(out, dividend);
2613 } else {
2614 __ Neg(out, dividend);
2615 }
2616 }
2617}
2618
2619void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2620 DCHECK(instruction->IsDiv() || instruction->IsRem());
2621
2622 LocationSummary* locations = instruction->GetLocations();
2623 Location second = locations->InAt(1);
2624 DCHECK(second.IsConstant());
2625
2626 Register out = OutputRegister(instruction);
2627 Register dividend = InputRegisterAt(instruction, 0);
2628 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002629 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002630 int ctz_imm = CTZ(abs_imm);
2631
2632 UseScratchRegisterScope temps(GetVIXLAssembler());
2633 Register temp = temps.AcquireSameSizeAs(out);
2634
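  // Division: bias negative dividends by (abs_imm - 1) so that the arithmetic
  // shift below rounds toward zero, and negate the result for a negative
  // divisor. Remainder: derive the same bias from the sign bit, add it, keep
  // the low ctz_imm bits, then subtract the bias again so the remainder keeps
  // the sign of the dividend.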
2635 if (instruction->IsDiv()) {
2636 __ Add(temp, dividend, abs_imm - 1);
2637 __ Cmp(dividend, 0);
2638 __ Csel(out, temp, dividend, lt);
2639 if (imm > 0) {
2640 __ Asr(out, out, ctz_imm);
2641 } else {
2642 __ Neg(out, Operand(out, ASR, ctz_imm));
2643 }
2644 } else {
2645 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2646 __ Asr(temp, dividend, bits - 1);
2647 __ Lsr(temp, temp, bits - ctz_imm);
2648 __ Add(out, dividend, temp);
2649 __ And(out, out, abs_imm - 1);
2650 __ Sub(out, out, temp);
2651 }
2652}
2653
2654void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2655 DCHECK(instruction->IsDiv() || instruction->IsRem());
2656
2657 LocationSummary* locations = instruction->GetLocations();
2658 Location second = locations->InAt(1);
2659 DCHECK(second.IsConstant());
2660
2661 Register out = OutputRegister(instruction);
2662 Register dividend = InputRegisterAt(instruction, 0);
2663 int64_t imm = Int64FromConstant(second.GetConstant());
2664
2665 Primitive::Type type = instruction->GetResultType();
2666 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2667
2668 int64_t magic;
2669 int shift;
2670 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2671
2672 UseScratchRegisterScope temps(GetVIXLAssembler());
2673 Register temp = temps.AcquireSameSizeAs(out);
2674
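  // Constant-divisor division via a precomputed reciprocal ("magic number",
  // in the style of Hacker's Delight): take the high half of dividend * magic,
  // correct when the signs of `magic` and `imm` differ, apply the precomputed
  // shift, then add the estimate's sign bit so the quotient rounds toward
  // zero. For a remainder the quotient is folded back with Msub as
  // dividend - quotient * imm.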
2675 // temp = get_high(dividend * magic)
2676 __ Mov(temp, magic);
2677 if (type == Primitive::kPrimLong) {
2678 __ Smulh(temp, dividend, temp);
2679 } else {
2680 __ Smull(temp.X(), dividend, temp);
2681 __ Lsr(temp.X(), temp.X(), 32);
2682 }
2683
2684 if (imm > 0 && magic < 0) {
2685 __ Add(temp, temp, dividend);
2686 } else if (imm < 0 && magic > 0) {
2687 __ Sub(temp, temp, dividend);
2688 }
2689
2690 if (shift != 0) {
2691 __ Asr(temp, temp, shift);
2692 }
2693
2694 if (instruction->IsDiv()) {
2695 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2696 } else {
2697 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2698 // TODO: Strength reduction for msub.
2699 Register temp_imm = temps.AcquireSameSizeAs(out);
2700 __ Mov(temp_imm, imm);
2701 __ Msub(out, temp, temp_imm, dividend);
2702 }
2703}
2704
2705void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2706 DCHECK(instruction->IsDiv() || instruction->IsRem());
2707 Primitive::Type type = instruction->GetResultType();
2708 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2709
2710 LocationSummary* locations = instruction->GetLocations();
2711 Register out = OutputRegister(instruction);
2712 Location second = locations->InAt(1);
2713
2714 if (second.IsConstant()) {
2715 int64_t imm = Int64FromConstant(second.GetConstant());
2716
2717 if (imm == 0) {
2718 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2719 } else if (imm == 1 || imm == -1) {
2720 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002721 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002722 DivRemByPowerOfTwo(instruction);
2723 } else {
2724 DCHECK(imm <= -2 || imm >= 2);
2725 GenerateDivRemWithAnyConstant(instruction);
2726 }
2727 } else {
2728 Register dividend = InputRegisterAt(instruction, 0);
2729 Register divisor = InputRegisterAt(instruction, 1);
2730 if (instruction->IsDiv()) {
2731 __ Sdiv(out, dividend, divisor);
2732 } else {
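      // ARM64 has no integer remainder instruction, so the remainder is
      // computed as dividend - (dividend / divisor) * divisor with an Sdiv
      // followed by an Msub.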
2733 UseScratchRegisterScope temps(GetVIXLAssembler());
2734 Register temp = temps.AcquireSameSizeAs(out);
2735 __ Sdiv(temp, dividend, divisor);
2736 __ Msub(out, temp, divisor, dividend);
2737 }
2738 }
2739}
2740
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002741void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2742 LocationSummary* locations =
2743 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2744 switch (div->GetResultType()) {
2745 case Primitive::kPrimInt:
2746 case Primitive::kPrimLong:
2747 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002748 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002749 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2750 break;
2751
2752 case Primitive::kPrimFloat:
2753 case Primitive::kPrimDouble:
2754 locations->SetInAt(0, Location::RequiresFpuRegister());
2755 locations->SetInAt(1, Location::RequiresFpuRegister());
2756 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2757 break;
2758
2759 default:
2760 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2761 }
2762}
2763
2764void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2765 Primitive::Type type = div->GetResultType();
2766 switch (type) {
2767 case Primitive::kPrimInt:
2768 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002769 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002770 break;
2771
2772 case Primitive::kPrimFloat:
2773 case Primitive::kPrimDouble:
2774 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2775 break;
2776
2777 default:
2778 LOG(FATAL) << "Unexpected div type " << type;
2779 }
2780}
2781
Alexandre Rames67555f72014-11-18 10:55:16 +00002782void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002783 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002784 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002785}
2786
2787void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2788 SlowPathCodeARM64* slow_path =
2789 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2790 codegen_->AddSlowPath(slow_path);
2791 Location value = instruction->GetLocations()->InAt(0);
2792
Alexandre Rames3e69f162014-12-10 10:36:50 +00002793 Primitive::Type type = instruction->GetType();
2794
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002795 if (!Primitive::IsIntegralType(type)) {
2796 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002797 return;
2798 }
2799
Alexandre Rames67555f72014-11-18 10:55:16 +00002800 if (value.IsConstant()) {
2801 int64_t divisor = Int64ConstantFrom(value);
2802 if (divisor == 0) {
2803 __ B(slow_path->GetEntryLabel());
2804 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002805 // A division by a non-zero constant is valid. We don't need to perform
2806 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002807 }
2808 } else {
2809 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2810 }
2811}
2812
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002813void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2814 LocationSummary* locations =
2815 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2816 locations->SetOut(Location::ConstantLocation(constant));
2817}
2818
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002819void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2820 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002821 // Will be generated at use site.
2822}
2823
Alexandre Rames5319def2014-10-23 10:03:10 +01002824void LocationsBuilderARM64::VisitExit(HExit* exit) {
2825 exit->SetLocations(nullptr);
2826}
2827
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002828void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002829}
2830
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002831void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2832 LocationSummary* locations =
2833 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2834 locations->SetOut(Location::ConstantLocation(constant));
2835}
2836
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002837void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002838 // Will be generated at use site.
2839}
2840
David Brazdilfc6a86a2015-06-26 10:33:45 +00002841void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002842 DCHECK(!successor->IsExitBlock());
2843 HBasicBlock* block = got->GetBlock();
2844 HInstruction* previous = got->GetPrevious();
2845 HLoopInformation* info = block->GetLoopInformation();
2846
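  // Back edges carry the loop's HSuspendCheck; emitting it here gives the
  // runtime a chance to suspend the thread (e.g. for GC) once per iteration
  // before the jump back to the loop header.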
David Brazdil46e2a392015-03-16 17:31:52 +00002847 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002848 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2849 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2850 return;
2851 }
2852 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2853 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2854 }
2855 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002856 __ B(codegen_->GetLabelOf(successor));
2857 }
2858}
2859
David Brazdilfc6a86a2015-06-26 10:33:45 +00002860void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2861 got->SetLocations(nullptr);
2862}
2863
2864void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2865 HandleGoto(got, got->GetSuccessor());
2866}
2867
2868void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2869 try_boundary->SetLocations(nullptr);
2870}
2871
2872void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2873 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2874 if (!successor->IsExitBlock()) {
2875 HandleGoto(try_boundary, successor);
2876 }
2877}
2878
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002879void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002880 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002881 vixl::aarch64::Label* true_target,
2882 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00002883 // FP branching requires both targets to be explicit. If either of the targets
2884 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002885 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002886 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002887
David Brazdil0debae72015-11-12 18:37:00 +00002888 if (true_target == nullptr && false_target == nullptr) {
2889 // Nothing to do. The code always falls through.
2890 return;
2891 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002892 // Constant condition, statically compared against "true" (integer value 1).
2893 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002894 if (true_target != nullptr) {
2895 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002896 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002897 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002898 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002899 if (false_target != nullptr) {
2900 __ B(false_target);
2901 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002902 }
David Brazdil0debae72015-11-12 18:37:00 +00002903 return;
2904 }
2905
2906 // The following code generates these patterns:
2907 // (1) true_target == nullptr && false_target != nullptr
2908 // - opposite condition true => branch to false_target
2909 // (2) true_target != nullptr && false_target == nullptr
2910 // - condition true => branch to true_target
2911 // (3) true_target != nullptr && false_target != nullptr
2912 // - condition true => branch to true_target
2913 // - branch to false_target
2914 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002915 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002916 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002917 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002918 if (true_target == nullptr) {
2919 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2920 } else {
2921 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2922 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002923 } else {
2924 // The condition instruction has not been materialized, use its inputs as
2925 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002926 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002927
David Brazdil0debae72015-11-12 18:37:00 +00002928 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002929 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002930 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002931 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002932 IfCondition opposite_condition = condition->GetOppositeCondition();
2933 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002934 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002935 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002936 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002937 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002938 // Integer cases.
2939 Register lhs = InputRegisterAt(condition, 0);
2940 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002941
2942 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01002943 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002944 if (true_target == nullptr) {
2945 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2946 non_fallthrough_target = false_target;
2947 } else {
2948 arm64_cond = ARM64Condition(condition->GetCondition());
2949 non_fallthrough_target = true_target;
2950 }
2951
Aart Bik086d27e2016-01-20 17:02:00 -08002952 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01002953 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
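        // Comparing against zero does not need the flags: Cbz/Cbnz test the
        // whole register, and for lt/ge (signed comparison with 0) Tbnz/Tbz
        // test the sign bit directly, so the Cmp can be skipped.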
Roland Levillain7f63c522015-07-13 15:54:55 +00002954 switch (arm64_cond) {
2955 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002956 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002957 break;
2958 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002959 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002960 break;
2961 case lt:
2962 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002963 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002964 break;
2965 case ge:
2966 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002967 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002968 break;
2969 default:
2970 // Without the `static_cast` the compiler raises a sign-promotion error
2971 // under `-Werror=sign-promo`.
2972 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2973 }
2974 } else {
2975 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002976 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002977 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002978 }
2979 }
David Brazdil0debae72015-11-12 18:37:00 +00002980
2981 // If neither branch falls through (case 3), the conditional branch to `true_target`
2982 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2983 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002984 __ B(false_target);
2985 }
David Brazdil0debae72015-11-12 18:37:00 +00002986
2987 if (fallthrough_target.IsLinked()) {
2988 __ Bind(&fallthrough_target);
2989 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002990}
2991
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002992void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2993 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002994 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002995 locations->SetInAt(0, Location::RequiresRegister());
2996 }
2997}
2998
2999void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003000 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3001 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003002 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3003 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3004 true_target = nullptr;
3005 }
3006 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3007 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3008 false_target = nullptr;
3009 }
David Brazdil0debae72015-11-12 18:37:00 +00003010 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003011}
3012
3013void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3014 LocationSummary* locations = new (GetGraph()->GetArena())
3015 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01003016 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003017 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003018 locations->SetInAt(0, Location::RequiresRegister());
3019 }
3020}
3021
3022void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003023 SlowPathCodeARM64* slow_path =
3024 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003025 GenerateTestAndBranch(deoptimize,
3026 /* condition_input_index */ 0,
3027 slow_path->GetEntryLabel(),
3028 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003029}
3030
David Brazdilc0b601b2016-02-08 14:20:45 +00003031static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3032 return condition->IsCondition() &&
3033 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3034}
3035
Alexandre Rames880f1192016-06-13 16:04:50 +01003036static inline Condition GetConditionForSelect(HCondition* condition) {
3037 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003038 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3039 : ARM64Condition(cond);
3040}
3041
David Brazdil74eb1b22015-12-14 11:44:01 +00003042void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3043 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003044 if (Primitive::IsFloatingPointType(select->GetType())) {
3045 locations->SetInAt(0, Location::RequiresFpuRegister());
3046 locations->SetInAt(1, Location::RequiresFpuRegister());
3047 locations->SetOut(Location::RequiresFpuRegister());
3048 } else {
3049 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3050 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3051 bool is_true_value_constant = cst_true_value != nullptr;
3052 bool is_false_value_constant = cst_false_value != nullptr;
3053 // Ask VIXL whether we should synthesize constants in registers.
3054 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3055 Operand true_op = is_true_value_constant ?
3056 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3057 Operand false_op = is_false_value_constant ?
3058 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3059 bool true_value_in_register = false;
3060 bool false_value_in_register = false;
3061 MacroAssembler::GetCselSynthesisInformation(
3062 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3063 true_value_in_register |= !is_true_value_constant;
3064 false_value_in_register |= !is_false_value_constant;
3065
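    // Csel only takes register operands, but VIXL can synthesize certain
    // constants cheaply (for instance via the zero register or a
    // Csinc/Csinv/Csneg variant); keep such inputs as constants and request
    // registers only where the query above says one is needed.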
3066 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3067 : Location::ConstantLocation(cst_true_value));
3068 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3069 : Location::ConstantLocation(cst_false_value));
3070 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003071 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003072
David Brazdil74eb1b22015-12-14 11:44:01 +00003073 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3074 locations->SetInAt(2, Location::RequiresRegister());
3075 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003076}
3077
3078void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003079 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003080 Condition csel_cond;
3081
3082 if (IsBooleanValueOrMaterializedCondition(cond)) {
3083 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003084 // Use the condition flags set by the previous instruction.
3085 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003086 } else {
3087 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003088 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003089 }
3090 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003091 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003092 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003093 } else {
3094 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003095 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003096 }
3097
Alexandre Rames880f1192016-06-13 16:04:50 +01003098 if (Primitive::IsFloatingPointType(select->GetType())) {
3099 __ Fcsel(OutputFPRegister(select),
3100 InputFPRegisterAt(select, 1),
3101 InputFPRegisterAt(select, 0),
3102 csel_cond);
3103 } else {
3104 __ Csel(OutputRegister(select),
3105 InputOperandAt(select, 1),
3106 InputOperandAt(select, 0),
3107 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003108 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003109}
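// Illustrative sketch only (not emitted verbatim; register names are
// placeholders and constants are not folded): for an integer HSelect whose
// condition is a materialized boolean, the code above typically reduces to
//   cmp  w_cond, #0
//   csel w_out, w_true, w_false, ne
// and the floating-point path uses fcsel instead. When the condition is an
// HCondition emitted right before the select, the cmp is skipped and the
// flags it already set are reused; VIXL may also lower constant operands to
// csinc/csinv-style forms instead of materializing them in registers.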
3110
David Srbecky0cf44932015-12-09 14:09:59 +00003111void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3112 new (GetGraph()->GetArena()) LocationSummary(info);
3113}
3114
David Srbeckyd28f4a02016-03-14 17:14:24 +00003115void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3116 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003117}
3118
3119void CodeGeneratorARM64::GenerateNop() {
3120 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003121}
3122
Alexandre Rames5319def2014-10-23 10:03:10 +01003123void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003124 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003125}
3126
3127void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003128 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003129}
3130
3131void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003132 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003133}
3134
3135void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003136 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003137}
3138
Roland Levillain44015862016-01-22 11:47:17 +00003139static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3140 return kEmitCompilerReadBarrier &&
3141 (kUseBakerReadBarrier ||
3142 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3143 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3144 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3145}
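// In other words, the extra temporary is only requested when read barriers
// are enabled: always with Baker read barriers, and otherwise only for the
// abstract-class, class-hierarchy and array-object checks.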
3146
Alexandre Rames67555f72014-11-18 10:55:16 +00003147void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003148 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003149 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003150 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003151 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003152 case TypeCheckKind::kExactCheck:
3153 case TypeCheckKind::kAbstractClassCheck:
3154 case TypeCheckKind::kClassHierarchyCheck:
3155 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003156 call_kind =
3157 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003158 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003159 break;
3160 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003161 case TypeCheckKind::kUnresolvedCheck:
3162 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003163 call_kind = LocationSummary::kCallOnSlowPath;
3164 break;
3165 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003166
Alexandre Rames67555f72014-11-18 10:55:16 +00003167 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003168 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003169 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003170 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003171 locations->SetInAt(0, Location::RequiresRegister());
3172 locations->SetInAt(1, Location::RequiresRegister());
3173 // The "out" register is used as a temporary, so it overlaps with the inputs.
3174 // Note that TypeCheckSlowPathARM64 uses this register too.
3175 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3176 // When read barriers are enabled, we need a temporary register for
3177 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003178 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003179 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003180 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003181}
3182
3183void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003184 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003185 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003186 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003187 Register obj = InputRegisterAt(instruction, 0);
3188 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003189 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003190 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003191 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3192 locations->GetTemp(0) :
3193 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003194 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3195 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3196 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3197 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003198
Scott Wakeling97c72b72016-06-24 16:19:36 +01003199 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003200 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003201
3202 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003203 // Avoid null check if we know `obj` is not null.
3204 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003205 __ Cbz(obj, &zero);
3206 }
3207
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003208 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003209 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003210
Roland Levillain44015862016-01-22 11:47:17 +00003211 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003212 case TypeCheckKind::kExactCheck: {
3213 __ Cmp(out, cls);
3214 __ Cset(out, eq);
3215 if (zero.IsLinked()) {
3216 __ B(&done);
3217 }
3218 break;
3219 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003220
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003221 case TypeCheckKind::kAbstractClassCheck: {
3222 // If the class is abstract, we eagerly fetch the super class of the
3223 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003224 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003225 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003226 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003227 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003228 // If `out` is null, we use it for the result, and jump to `done`.
3229 __ Cbz(out, &done);
3230 __ Cmp(out, cls);
3231 __ B(ne, &loop);
3232 __ Mov(out, 1);
3233 if (zero.IsLinked()) {
3234 __ B(&done);
3235 }
3236 break;
3237 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003238
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003239 case TypeCheckKind::kClassHierarchyCheck: {
3240 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003241 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003242 __ Bind(&loop);
3243 __ Cmp(out, cls);
3244 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003245 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003246 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003247 __ Cbnz(out, &loop);
3248 // If `out` is null, we use it for the result, and jump to `done`.
3249 __ B(&done);
3250 __ Bind(&success);
3251 __ Mov(out, 1);
3252 if (zero.IsLinked()) {
3253 __ B(&done);
3254 }
3255 break;
3256 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003257
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003258 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003259 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003260 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003261 __ Cmp(out, cls);
3262 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003263 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003264 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003265 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003266 // If `out` is null, we use it for the result, and jump to `done`.
3267 __ Cbz(out, &done);
3268 __ Ldrh(out, HeapOperand(out, primitive_offset));
3269 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3270 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003271 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003272 __ Mov(out, 1);
3273 __ B(&done);
3274 break;
3275 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003276
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003277 case TypeCheckKind::kArrayCheck: {
3278 __ Cmp(out, cls);
3279 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003280 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3281 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003282 codegen_->AddSlowPath(slow_path);
3283 __ B(ne, slow_path->GetEntryLabel());
3284 __ Mov(out, 1);
3285 if (zero.IsLinked()) {
3286 __ B(&done);
3287 }
3288 break;
3289 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003290
Calin Juravle98893e12015-10-02 21:05:03 +01003291 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003292 case TypeCheckKind::kInterfaceCheck: {
3293 // Note that we indeed only call on slow path, but we always go
3294 // into the slow path for the unresolved and interface check
3295 // cases.
3296 //
3297 // We cannot directly call the InstanceofNonTrivial runtime
3298 // entry point without resorting to a type checking slow path
3299 // here (i.e. by calling InvokeRuntime directly), as it would
3300 // require assigning fixed registers for the inputs of this
3301 // HInstanceOf instruction (following the runtime calling
3302 // convention), which might be cluttered by the potential first
3303 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003304 //
3305 // TODO: Introduce a new runtime entry point taking the object
3306 // to test (instead of its class) as argument, and let it deal
3307 // with the read barrier issues. This will let us refactor this
3308 // case of the `switch` code as it was previously (with a direct
3309 // call to the runtime not using a type checking slow path).
3310 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003311 DCHECK(locations->OnlyCallsOnSlowPath());
3312 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3313 /* is_fatal */ false);
3314 codegen_->AddSlowPath(slow_path);
3315 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003316 if (zero.IsLinked()) {
3317 __ B(&done);
3318 }
3319 break;
3320 }
3321 }
3322
3323 if (zero.IsLinked()) {
3324 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003325 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003326 }
3327
3328 if (done.IsLinked()) {
3329 __ Bind(&done);
3330 }
3331
3332 if (slow_path != nullptr) {
3333 __ Bind(slow_path->GetExitLabel());
3334 }
3335}
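// Illustrative sketch of the kExactCheck fast path above, assuming no read
// barriers and placeholder register names (the real ones come from the
// location summary):
//   cbz  w_obj, zero                    // null object -> result is 0
//   ldr  w_out, [x_obj, #class_offset]  // out = obj->klass_ (maybe unpoisoned)
//   cmp  w_out, w_cls
//   cset w_out, eq                      // out = (out == cls) ? 1 : 0
// The other check kinds wrap this pattern in loops over super_class_ /
// component_type_, or defer entirely to TypeCheckSlowPathARM64.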
3336
3337void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3338 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3339 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3340
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003341 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3342 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003343 case TypeCheckKind::kExactCheck:
3344 case TypeCheckKind::kAbstractClassCheck:
3345 case TypeCheckKind::kClassHierarchyCheck:
3346 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003347 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3348 LocationSummary::kCallOnSlowPath :
3349 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003350 break;
3351 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003352 case TypeCheckKind::kUnresolvedCheck:
3353 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003354 call_kind = LocationSummary::kCallOnSlowPath;
3355 break;
3356 }
3357
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003358 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3359 locations->SetInAt(0, Location::RequiresRegister());
3360 locations->SetInAt(1, Location::RequiresRegister());
3361 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3362 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003363 // When read barriers are enabled, we need an additional temporary
3364 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003365 if (TypeCheckNeedsATemporary(type_check_kind)) {
3366 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003367 }
3368}
3369
3370void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003371 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003372 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003373 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003374 Register obj = InputRegisterAt(instruction, 0);
3375 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003376 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003377 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3378 locations->GetTemp(1) :
3379 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003380 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003381 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3382 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3383 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3384 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003385
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003386 bool is_type_check_slow_path_fatal =
3387 (type_check_kind == TypeCheckKind::kExactCheck ||
3388 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3389 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3390 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3391 !instruction->CanThrowIntoCatchBlock();
3392 SlowPathCodeARM64* type_check_slow_path =
3393 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3394 is_type_check_slow_path_fatal);
3395 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003396
Scott Wakeling97c72b72016-06-24 16:19:36 +01003397 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003398 // Avoid null check if we know obj is not null.
3399 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003400 __ Cbz(obj, &done);
3401 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003402
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003403 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003404 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003405
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003407 case TypeCheckKind::kExactCheck:
3408 case TypeCheckKind::kArrayCheck: {
3409 __ Cmp(temp, cls);
3410 // Jump to slow path for throwing the exception or doing a
3411 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003412 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003413 break;
3414 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003415
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003416 case TypeCheckKind::kAbstractClassCheck: {
3417 // If the class is abstract, we eagerly fetch the super class of the
3418 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003419 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003420 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003421 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003422 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003423
3424 // If the class reference currently in `temp` is not null, jump
3425 // to the `compare_classes` label to compare it with the checked
3426 // class.
3427 __ Cbnz(temp, &compare_classes);
3428 // Otherwise, jump to the slow path to throw the exception.
3429 //
3430 // But before, move back the object's class into `temp` before
3431 // going into the slow path, as it has been overwritten in the
3432 // meantime.
3433 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003434 GenerateReferenceLoadTwoRegisters(
3435 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003436 __ B(type_check_slow_path->GetEntryLabel());
3437
3438 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003439 __ Cmp(temp, cls);
3440 __ B(ne, &loop);
3441 break;
3442 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003443
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003444 case TypeCheckKind::kClassHierarchyCheck: {
3445 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003446 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003447 __ Bind(&loop);
3448 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003449 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003450
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003451 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003452 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003453
3454 // If the class reference currently in `temp` is not null, jump
3455 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003456 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003457 // Otherwise, jump to the slow path to throw the exception.
3458 //
3459 // But before, move back the object's class into `temp` before
3460 // going into the slow path, as it has been overwritten in the
3461 // meantime.
3462 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003463 GenerateReferenceLoadTwoRegisters(
3464 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003465 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003466 break;
3467 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003468
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003469 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003470 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003471 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003472 __ Cmp(temp, cls);
3473 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003474
3475 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003476 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003477 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003478
3479 // If the component type is not null (i.e. the object is indeed
3480 // an array), jump to label `check_non_primitive_component_type`
3481 // to further check that this component type is not a primitive
3482 // type.
3483 __ Cbnz(temp, &check_non_primitive_component_type);
3484 // Otherwise, jump to the slow path to throw the exception.
3485 //
3486 // But before, move back the object's class into `temp` before
3487 // going into the slow path, as it has been overwritten in the
3488 // meantime.
3489 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003490 GenerateReferenceLoadTwoRegisters(
3491 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003492 __ B(type_check_slow_path->GetEntryLabel());
3493
3494 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003495 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3496 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003497 __ Cbz(temp, &done);
3498 // Same comment as above regarding `temp` and the slow path.
3499 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003500 GenerateReferenceLoadTwoRegisters(
3501 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003502 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003503 break;
3504 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003505
Calin Juravle98893e12015-10-02 21:05:03 +01003506 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003507 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003508 // We always go into the type check slow path for the unresolved
3509 // and interface check cases.
3510 //
3511 // We cannot directly call the CheckCast runtime entry point
3512 // without resorting to a type checking slow path here (i.e. by
3513 // calling InvokeRuntime directly), as it would require assigning
3514 // fixed registers for the inputs of this HCheckCast
3515 // instruction (following the runtime calling convention), which
3516 // might be cluttered by the potential first read barrier
3517 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003518 //
3519 // TODO: Introduce a new runtime entry point taking the object
3520 // to test (instead of its class) as argument, and let it deal
3521 // with the read barrier issues. This will let us refactor this
3522 // case of the `switch` code as it was previously (with a direct
3523 // call to the runtime not using a type checking slow path).
3524 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003525 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003526 break;
3527 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003528 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003529
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003530 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003531}
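// Illustrative sketch of the kExactCheck case above, assuming no read
// barriers and placeholder register names:
//   cbz  w_obj, done                    // null objects always pass the cast
//   ldr  w_temp, [x_obj, #class_offset] // temp = obj->klass_
//   cmp  w_temp, w_cls
//   b.ne <TypeCheckSlowPathARM64>       // mismatch -> throw ClassCastException
// done:
// For this case the slow path is fatal (non-returning) unless the check can
// throw into a catch block in the same method.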
3532
Alexandre Rames5319def2014-10-23 10:03:10 +01003533void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3534 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3535 locations->SetOut(Location::ConstantLocation(constant));
3536}
3537
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003538void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003539 // Will be generated at use site.
3540}
3541
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003542void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3543 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3544 locations->SetOut(Location::ConstantLocation(constant));
3545}
3546
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003547void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003548 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003549}
3550
Calin Juravle175dc732015-08-25 15:42:32 +01003551void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3552 // The trampoline uses the same calling convention as dex calling conventions,
3553 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3554 // the method_idx.
3555 HandleInvoke(invoke);
3556}
3557
3558void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3559 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3560}
3561
Alexandre Rames5319def2014-10-23 10:03:10 +01003562void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003563 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003564 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003565}
3566
Alexandre Rames67555f72014-11-18 10:55:16 +00003567void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3568 HandleInvoke(invoke);
3569}
3570
3571void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3572 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003573 LocationSummary* locations = invoke->GetLocations();
3574 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003575 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003576 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003577 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003578
3579 // The register ip1 is required to be used for the hidden argument in
3580 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003581 MacroAssembler* masm = GetVIXLAssembler();
3582 UseScratchRegisterScope scratch_scope(masm);
3583 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003584 scratch_scope.Exclude(ip1);
3585 __ Mov(ip1, invoke->GetDexMethodIndex());
3586
Alexandre Rames67555f72014-11-18 10:55:16 +00003587 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003588 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003589 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003590 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003591 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003592 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003593 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003594 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003595 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003596 // Instead of simply (possibly) unpoisoning `temp` here, we should
3597 // emit a read barrier for the previous class reference load.
3598 // However this is not required in practice, as this is an
3599 // intermediate/temporary reference and because the current
3600 // concurrent copying collector keeps the from-space memory
3601 // intact/accessible until the end of the marking phase (the
3602 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003603 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003604 __ Ldr(temp,
3605 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3606 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003607 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003608 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003609 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003610 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003611 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003612 // lr();
3613 __ Blr(lr);
3614 DCHECK(!codegen_->IsLeafMethod());
3615 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3616}
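// Rough shape of the interface dispatch emitted above (placeholder register
// names; offsets are symbolic):
//   mov  ip1, #dex_method_index             // hidden argument for the IMT conflict trampoline
//   ldr  w_temp, [x_receiver, #class_offset]
//   ldr  x_temp, [x_temp, #imt_ptr_offset]
//   ldr  x_temp, [x_temp, #imt_entry_offset]
//   ldr  lr, [x_temp, #entry_point_offset]
//   blr  lr
// ip1 is excluded from the scratch pool precisely because the conflict
// trampoline expects the method index there.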
3617
3618void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003619 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3620 if (intrinsic.TryDispatch(invoke)) {
3621 return;
3622 }
3623
Alexandre Rames67555f72014-11-18 10:55:16 +00003624 HandleInvoke(invoke);
3625}
3626
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003627void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003628 // Explicit clinit checks triggered by static invokes must have been pruned by
3629 // art::PrepareForRegisterAllocation.
3630 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003631
Andreas Gampe878d58c2015-01-15 23:24:00 -08003632 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3633 if (intrinsic.TryDispatch(invoke)) {
3634 return;
3635 }
3636
Alexandre Rames67555f72014-11-18 10:55:16 +00003637 HandleInvoke(invoke);
3638}
3639
Andreas Gampe878d58c2015-01-15 23:24:00 -08003640static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3641 if (invoke->GetLocations()->Intrinsified()) {
3642 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3643 intrinsic.Dispatch(invoke);
3644 return true;
3645 }
3646 return false;
3647}
3648
Vladimir Markodc151b22015-10-15 18:02:30 +01003649HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3650 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01003651 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003652 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003653 return desired_dispatch_info;
3654}
3655
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003656void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003657 // For better instruction scheduling we load the direct code pointer before the method pointer.
3658 bool direct_code_loaded = false;
3659 switch (invoke->GetCodePtrLocation()) {
3660 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3661 // LR = code address from literal pool with link-time patch.
3662 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3663 direct_code_loaded = true;
3664 break;
3665 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3666 // LR = invoke->GetDirectCodePtr();
3667 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3668 direct_code_loaded = true;
3669 break;
3670 default:
3671 break;
3672 }
3673
Andreas Gampe878d58c2015-01-15 23:24:00 -08003674 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003675 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3676 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003677 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
3678 uint32_t offset =
3679 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00003680 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003681 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00003682 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003683 }
Vladimir Marko58155012015-08-19 12:49:41 +00003684 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003685 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003686 break;
3687 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3688 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003689 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003690 break;
3691 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3692 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003693 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003694 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3695 break;
3696 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3697 // Add ADRP with its PC-relative DexCache access patch.
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01003698 const DexFile& dex_file = invoke->GetDexFile();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003699 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003700 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markoaad75c62016-10-03 08:46:48 +00003701 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00003702 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003703 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003704 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00003705 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00003706 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003707 }
Vladimir Marko58155012015-08-19 12:49:41 +00003708 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003709 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003710 Register reg = XRegisterFrom(temp);
3711 Register method_reg;
3712 if (current_method.IsRegister()) {
3713 method_reg = XRegisterFrom(current_method);
3714 } else {
3715 DCHECK(invoke->GetLocations()->Intrinsified());
3716 DCHECK(!current_method.IsValid());
3717 method_reg = reg;
3718 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3719 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003720
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003721 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003722 __ Ldr(reg.X(),
3723 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07003724 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003725 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003726 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3727 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003728 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3729 break;
3730 }
3731 }
3732
3733 switch (invoke->GetCodePtrLocation()) {
3734 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3735 __ Bl(&frame_entry_label_);
3736 break;
3737 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00003738 relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
3739 invoke->GetTargetMethod().dex_method_index);
Scott Wakeling97c72b72016-06-24 16:19:36 +01003740 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3741 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003742 __ Bind(label);
3743 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003744 break;
3745 }
3746 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3747 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3748 // LR prepared above for better instruction scheduling.
3749 DCHECK(direct_code_loaded);
3750 // lr()
3751 __ Blr(lr);
3752 break;
3753 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3754 // LR = callee_method->entry_point_from_quick_compiled_code_;
3755 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003756 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07003757 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003758 // lr()
3759 __ Blr(lr);
3760 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003761 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003762
Andreas Gampe878d58c2015-01-15 23:24:00 -08003763 DCHECK(!IsLeafMethod());
3764}
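// Illustrative example of one common combination above, kDexCachePcRelative
// with kCallArtMethod (placeholder register names, offsets patched at link
// time):
//   adrp x_temp, <dex cache array page>
//   ldr  x_temp, [x_temp, #<page offset>]   // resolved ArtMethod*
//   ldr  lr, [x_temp, #entry_point_from_quick_compiled_code_offset]
//   blr  lr
// The ADRP/LDR pair is emitted through the placeholder helpers below and
// fixed up by the linker via the recorded PC-relative DexCache patches.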
3765
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003766void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003767 // Use the calling convention instead of the location of the receiver, as
3768 // intrinsics may have put the receiver in a different register. In the intrinsics
3769 // slow path, the arguments have been moved to the right place, so here we are
3770 // guaranteed that the receiver is the first register of the calling convention.
3771 InvokeDexCallingConvention calling_convention;
3772 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003773 Register temp = XRegisterFrom(temp_in);
3774 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3775 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3776 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003777 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003778
3779 BlockPoolsScope block_pools(GetVIXLAssembler());
3780
3781 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003782 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003783 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003784 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003785 // Instead of simply (possibly) unpoisoning `temp` here, we should
3786 // emit a read barrier for the previous class reference load.
// However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003787 // intermediate/temporary reference and because the current
3788 // concurrent copying collector keeps the from-space memory
3789 // intact/accessible until the end of the marking phase (the
3790 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003791 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3792 // temp = temp->GetMethodAt(method_offset);
3793 __ Ldr(temp, MemOperand(temp, method_offset));
3794 // lr = temp->GetEntryPoint();
3795 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3796 // lr();
3797 __ Blr(lr);
3798}
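// Rough sketch of the virtual dispatch above (placeholder register names):
//   ldr  w_temp, [x_receiver, #class_offset]     // temp = receiver->klass_
//   ldr  x_temp, [x_temp, #vtable_entry_offset]  // temp = klass->embedded vtable[index]
//   ldr  lr, [x_temp, #entry_point_offset]
//   blr  lr
// The vtable entry offset is a compile-time constant derived from the
// invoke's vtable index and kArm64PointerSize.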
3799
Scott Wakeling97c72b72016-06-24 16:19:36 +01003800vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3801 const DexFile& dex_file,
3802 uint32_t string_index,
3803 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003804 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3805}
3806
Scott Wakeling97c72b72016-06-24 16:19:36 +01003807vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3808 const DexFile& dex_file,
3809 uint32_t type_index,
3810 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003811 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3812}
3813
Scott Wakeling97c72b72016-06-24 16:19:36 +01003814vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3815 const DexFile& dex_file,
3816 uint32_t element_offset,
3817 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003818 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3819}
3820
Scott Wakeling97c72b72016-06-24 16:19:36 +01003821vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3822 const DexFile& dex_file,
3823 uint32_t offset_or_index,
3824 vixl::aarch64::Label* adrp_label,
3825 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003826 // Add a patch entry and return the label.
3827 patches->emplace_back(dex_file, offset_or_index);
3828 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003829 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003830 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3831 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3832 return label;
3833}
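// A note on the pairing above: the first call for a given target passes a
// null adrp_label, so the new patch anchors itself (this is the ADRP patch);
// a subsequent call passing that label creates the companion ADD/LDR patch
// whose pc_insn_label points back at the ADRP, letting the linker compute the
// same target for both instructions.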
3834
Scott Wakeling97c72b72016-06-24 16:19:36 +01003835vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003836 const DexFile& dex_file, uint32_t string_index) {
3837 return boot_image_string_patches_.GetOrCreate(
3838 StringReference(&dex_file, string_index),
3839 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3840}
3841
Scott Wakeling97c72b72016-06-24 16:19:36 +01003842vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003843 const DexFile& dex_file, uint32_t type_index) {
3844 return boot_image_type_patches_.GetOrCreate(
3845 TypeReference(&dex_file, type_index),
3846 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3847}
3848
Scott Wakeling97c72b72016-06-24 16:19:36 +01003849vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3850 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003851 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3852 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3853 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3854}
3855
Scott Wakeling97c72b72016-06-24 16:19:36 +01003856vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3857 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003858 return DeduplicateUint64Literal(address);
3859}
3860
Vladimir Markoaad75c62016-10-03 08:46:48 +00003861void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
3862 vixl::aarch64::Register reg) {
3863 DCHECK(reg.IsX());
3864 SingleEmissionCheckScope guard(GetVIXLAssembler());
3865 __ Bind(fixup_label);
3866 __ adrp(reg, /* offset placeholder */ 0);
3867}
3868
3869void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
3870 vixl::aarch64::Register out,
3871 vixl::aarch64::Register base) {
3872 DCHECK(out.IsX());
3873 DCHECK(base.IsX());
3874 SingleEmissionCheckScope guard(GetVIXLAssembler());
3875 __ Bind(fixup_label);
3876 __ add(out, base, Operand(/* offset placeholder */ 0));
3877}
3878
3879void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
3880 vixl::aarch64::Register out,
3881 vixl::aarch64::Register base) {
3882 DCHECK(base.IsX());
3883 SingleEmissionCheckScope guard(GetVIXLAssembler());
3884 __ Bind(fixup_label);
3885 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
3886}
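// Typical use of the three placeholder emitters above, mirroring
// GenerateStaticOrDirectCall (dex_file/offset/temp stand for the caller's
// values):
//   vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, offset);
//   EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
//   vixl::aarch64::Label* ldr_label =
//       NewPcRelativeDexCacheArrayPatch(dex_file, offset, adrp_label);
//   EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));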
3887
3888template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
3889inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
3890 const ArenaDeque<PcRelativePatchInfo>& infos,
3891 ArenaVector<LinkerPatch>* linker_patches) {
3892 for (const PcRelativePatchInfo& info : infos) {
3893 linker_patches->push_back(Factory(info.label.GetLocation(),
3894 &info.target_dex_file,
3895 info.pc_insn_label->GetLocation(),
3896 info.offset_or_index));
3897 }
3898}
3899
Vladimir Marko58155012015-08-19 12:49:41 +00003900void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3901 DCHECK(linker_patches->empty());
3902 size_t size =
3903 method_patches_.size() +
3904 call_patches_.size() +
3905 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003906 pc_relative_dex_cache_patches_.size() +
3907 boot_image_string_patches_.size() +
3908 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003909 boot_image_type_patches_.size() +
3910 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003911 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003912 linker_patches->reserve(size);
3913 for (const auto& entry : method_patches_) {
3914 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003915 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3916 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003917 target_method.dex_file,
3918 target_method.dex_method_index));
3919 }
3920 for (const auto& entry : call_patches_) {
3921 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003922 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3923 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003924 target_method.dex_file,
3925 target_method.dex_method_index));
3926 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00003927 for (const PatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3928 linker_patches->push_back(
3929 LinkerPatch::RelativeCodePatch(info.label.GetLocation(), &info.dex_file, info.index));
Vladimir Marko58155012015-08-19 12:49:41 +00003930 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003931 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003932 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003933 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003934 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003935 info.offset_or_index));
3936 }
3937 for (const auto& entry : boot_image_string_patches_) {
3938 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003939 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3940 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003941 target_string.dex_file,
3942 target_string.string_index));
3943 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00003944 if (!GetCompilerOptions().IsBootImage()) {
3945 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
3946 linker_patches);
3947 } else {
3948 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
3949 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003950 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003951 for (const auto& entry : boot_image_type_patches_) {
3952 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003953 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3954 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003955 target_type.dex_file,
3956 target_type.type_index));
3957 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00003958 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
3959 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003960 for (const auto& entry : boot_image_address_patches_) {
3961 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003962 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3963 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003964 }
3965}
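// Note: the `size` computed above is only used to reserve space in
// `linker_patches` up front; string patches are then emitted either as
// boot-image-relative patches or as .bss entry patches, depending on whether
// we are compiling the boot image.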
3966
Scott Wakeling97c72b72016-06-24 16:19:36 +01003967vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003968 Uint32ToLiteralMap* map) {
3969 return map->GetOrCreate(
3970 value,
3971 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3972}
3973
Scott Wakeling97c72b72016-06-24 16:19:36 +01003974vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003975 return uint64_literals_.GetOrCreate(
3976 value,
3977 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003978}
3979
Scott Wakeling97c72b72016-06-24 16:19:36 +01003980vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003981 MethodReference target_method,
3982 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003983 return map->GetOrCreate(
3984 target_method,
3985 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003986}
3987
Scott Wakeling97c72b72016-06-24 16:19:36 +01003988vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003989 MethodReference target_method) {
3990 return DeduplicateMethodLiteral(target_method, &method_patches_);
3991}
3992
Scott Wakeling97c72b72016-06-24 16:19:36 +01003993vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003994 MethodReference target_method) {
3995 return DeduplicateMethodLiteral(target_method, &call_patches_);
3996}
3997
3998
Andreas Gampe878d58c2015-01-15 23:24:00 -08003999void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004000 // Explicit clinit checks triggered by static invokes must have been pruned by
4001 // art::PrepareForRegisterAllocation.
4002 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004003
Andreas Gampe878d58c2015-01-15 23:24:00 -08004004 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4005 return;
4006 }
4007
Alexandre Ramesd921d642015-04-16 15:07:16 +01004008 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004009 LocationSummary* locations = invoke->GetLocations();
4010 codegen_->GenerateStaticOrDirectCall(
4011 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004012 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004013}
4014
4015void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004016 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4017 return;
4018 }
4019
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004020 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004021 DCHECK(!codegen_->IsLeafMethod());
4022 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4023}
4024
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004025HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4026 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004027 switch (desired_class_load_kind) {
4028 case HLoadClass::LoadKind::kReferrersClass:
4029 break;
4030 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4031 DCHECK(!GetCompilerOptions().GetCompilePic());
4032 break;
4033 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4034 DCHECK(GetCompilerOptions().GetCompilePic());
4035 break;
4036 case HLoadClass::LoadKind::kBootImageAddress:
4037 break;
4038 case HLoadClass::LoadKind::kDexCacheAddress:
4039 DCHECK(Runtime::Current()->UseJitCompilation());
4040 break;
4041 case HLoadClass::LoadKind::kDexCachePcRelative:
4042 DCHECK(!Runtime::Current()->UseJitCompilation());
4043 break;
4044 case HLoadClass::LoadKind::kDexCacheViaMethod:
4045 break;
4046 }
4047 return desired_class_load_kind;
4048}
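// On ARM64 every class load kind is supported as-is; the switch above only
// DCHECKs that the PIC- and JIT-specific kinds are used in the matching
// compilation mode before echoing the desired kind back.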
4049
Alexandre Rames67555f72014-11-18 10:55:16 +00004050void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004051 if (cls->NeedsAccessCheck()) {
4052 InvokeRuntimeCallingConvention calling_convention;
4053 CodeGenerator::CreateLoadClassLocationSummary(
4054 cls,
4055 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01004056 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004057 /* code_generator_supports_read_barrier */ true);
4058 return;
4059 }
4060
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004061 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4062 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004063 ? LocationSummary::kCallOnSlowPath
4064 : LocationSummary::kNoCall;
4065 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004066 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004067 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004068 }
4069
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004070 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4071 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
4072 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4073 locations->SetInAt(0, Location::RequiresRegister());
4074 }
4075 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004076}
4077
4078void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004079 if (cls->NeedsAccessCheck()) {
4080 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004081 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004082 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004083 return;
4084 }
4085
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004086 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004087 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004088
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004089 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004090 bool generate_null_check = false;
4091 switch (cls->GetLoadKind()) {
4092 case HLoadClass::LoadKind::kReferrersClass: {
4093 DCHECK(!cls->CanCallRuntime());
4094 DCHECK(!cls->MustGenerateClinitCheck());
4095 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4096 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004097 GenerateGcRootFieldLoad(cls,
4098 out_loc,
4099 current_method,
4100 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004101 /* fixup_label */ nullptr,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004102 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004103 break;
4104 }
4105 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004106 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004107 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4108 cls->GetTypeIndex()));
4109 break;
4110 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004111 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004112 // Add ADRP with its PC-relative type patch.
4113 const DexFile& dex_file = cls->GetDexFile();
4114 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004115 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004116 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004117 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004118 vixl::aarch64::Label* add_label =
4119 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004120 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004121 break;
4122 }
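    // For illustration, the two placeholders above are later patched into
    // something like (immediates shown symbolically; a sketch, not the exact
    // relocation syntax):
    //
    //   adrp xN, TypeAddress             // xN = 4KiB page containing the type
    //   add  xN, xN, #:lo12:TypeAddress  // add the low 12 bits of the address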
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // LDR immediate has a 12-bit offset multiplied by the size, which for 32-bit loads
      // gives a 16KiB range. To try to reduce the number of literals when we load
      // multiple types, split the dex cache address into a 16KiB-aligned base loaded
      // from a literal and the remaining offset embedded in the load.
      static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
      DCHECK_ALIGNED(cls->GetAddress(), 4u);
      constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
      uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
      uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
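      // Worked example with a hypothetical address: for cls->GetAddress() == 0x76543210,
      // offset_bits is 14, so base_address = 0x76543210 & ~0x3fff = 0x76540000 and
      // offset = 0x3210. The base is shared via a single literal, and the offset fits
      // the scaled 12-bit immediate of the 32-bit load issued below.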
      __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              offset,
                              /* fixup_label */ nullptr,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = cls->GetDexFile();
      uint32_t element_offset = cls->GetDexCacheElementOffset();
      vixl::aarch64::Label* adrp_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add LDR with its PC-relative DexCache access patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              /* offset placeholder */ 0,
                              ldr_label,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      MemberOffset resolved_types_offset =
          ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      Register current_method = InputRegisterAt(cls, 0);
      __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out.X(),
                              CodeGenerator::GetCacheOffset(cls->GetTypeIndex()),
                              /* fixup_label */ nullptr,
                              requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
          ? LocationSummary::kCallOnMainOnly
          : LocationSummary::kCallOnSlowPath)
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and/or marking to save everything, including temps.
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
        DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
                  RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot).GetCode());
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Register out = OutputRegister(load);

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
                                                              load->GetStringIndex()));
      return;  // No dex cache slow path.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t string_index = load->GetStringIndex();
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add ADD with its PC-relative String patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
      codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Add ADRP with its PC-relative String .bss entry patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t string_index = load->GetStringIndex();
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
      Register temp = temps.AcquireX();
      vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
      // Add LDR with its PC-relative String patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
      // /* GcRoot<mirror::String> */ out = *(base_address + offset)  /* PC-relative */
      GenerateGcRootFieldLoad(load,
                              load->GetLocations()->Out(),
                              temp,
                              /* offset placeholder */ 0u,
                              ldr_label,
                              kEmitCompilerReadBarrier);
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
      codegen_->AddSlowPath(slow_path);
      __ Cbz(out.X(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
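    // For illustration, the kBssEntry sequence above expands to roughly
    // (immediates are patched at link time; register names are placeholders):
    //
    //   adrp x_temp, StringBssEntry       // page of the .bss slot
    //   ldr  w_out, [x_temp, #:lo12:...]  // load the GC root from the slot
    //   cbz  w_out, slow_path             // unresolved -> call pResolveString
    //
    // The slow path resolves the string through the runtime and, in this
    // revision, also writes the result back into the .bss slot so later
    // executions take the fast path.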
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
  __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex());
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
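    // The pNewEmptyString entry in the thread's entrypoint table holds an
    // ArtMethod* for the StringFactory method, so the code below loads that
    // method into the temp, then loads and calls its quick compiled-code entry
    // point directly instead of going through a runtime trampoline.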
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
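  // Loading the object's first word into the zero register faults if `obj` is
  // null; the runtime's fault handler recognizes the access and raises the
  // NullPointerException using the PC info recorded below.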
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
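      // AArch64 has no floating-point remainder instruction, so Java's frem
      // and drem are lowered to runtime calls to fmodf/fmod.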
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32bit input value as a 64bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
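      // Sign-extend from the narrower width; e.g. a byte-to-int conversion
      // becomes `sbfx w_out, w_src, #0, #8`, replicating bit 7 into the upper
      // bits, whereas the char cases above use `ubfx` to zero-extend.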
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
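    // FCVTZS rounds toward zero, saturates on overflow, and converts NaN to 0,
    // which matches Java's narrowing float/double-to-int/long semantics, so no
    // extra fix-up code is needed around the conversion.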
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Assume at most 16 assembly instructions are generated, on average, per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/-1MB), so we set a threshold on the number of HIRs in the graph
  // to make sure we don't emit a jump table whose target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
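  // With kInstructionSize == 4, the threshold works out to 1048576 / (16 * 4) = 16384 HIRs:
  // beyond that, the compare/jump cascade below is used instead, since an ADR emitted here
  // might not reach a jump table placed after all the generated code.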

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
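    // For illustration, a four-entry switch over {0, 1, 2, 3} lowers to roughly
    // (register names are placeholders):
    //
    //   subs w_tmp, w_value, #0    // bias by lower_bound
    //   b.eq case_0
    //   subs w_tmp, w_tmp, #2
    //   b.lo case_1                // original value was 1
    //   b.eq case_2                // original value was 2
    //   cmp  w_tmp, #1
    //   b.eq case_3                // original value was 3
    //   b    default               // unless default is the next block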
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two
    // blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr does not
    // require any blocked registers, so we are free to use both VIXL blocked registers
    // to reduce register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
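
    // For illustration, the jump-table path emits roughly (register names are
    // placeholders):
    //
    //   sub  w_idx, w_value, #lower_bound      // only when lower_bound != 0
    //   cmp  w_idx, #num_entries
    //   b.hs default
    //   adr  x_table, table_start              // PC-relative table address
    //   ldr  w_off, [x_table, w_idx, uxtw #2]  // 32-bit offset entry
    //   add  x_target, x_table, w_off, sxtw    // offsets are table-relative
    //   br   x_target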
Mark Mendellfe57faa2015-09-18 09:26:15 -04005023 }
5024}
5025
Roland Levillain44015862016-01-22 11:47:17 +00005026void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
5027 Location out,
5028 uint32_t offset,
5029 Location maybe_temp) {
5030 Primitive::Type type = Primitive::kPrimNot;
5031 Register out_reg = RegisterFrom(out, type);
5032 if (kEmitCompilerReadBarrier) {
5033 Register temp_reg = RegisterFrom(maybe_temp, type);
5034 if (kUseBakerReadBarrier) {
5035 // Load with fast path based Baker's read barrier.
5036 // /* HeapReference<Object> */ out = *(out + offset)
5037 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5038 out,
5039 out_reg,
5040 offset,
5041 temp_reg,
5042 /* needs_null_check */ false,
5043 /* use_load_acquire */ false);
5044 } else {
5045 // Load with slow path based read barrier.
5046 // Save the value of `out` into `maybe_temp` before overwriting it
5047 // in the following move operation, as we will need it for the
5048 // read barrier below.
5049 __ Mov(temp_reg, out_reg);
5050 // /* HeapReference<Object> */ out = *(out + offset)
5051 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5052 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5053 }
5054 } else {
5055 // Plain load with no read barrier.
5056 // /* HeapReference<Object> */ out = *(out + offset)
5057 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5058 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5059 }
5060}
5061
5062void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
5063 Location out,
5064 Location obj,
5065 uint32_t offset,
5066 Location maybe_temp) {
5067 Primitive::Type type = Primitive::kPrimNot;
5068 Register out_reg = RegisterFrom(out, type);
5069 Register obj_reg = RegisterFrom(obj, type);
5070 if (kEmitCompilerReadBarrier) {
5071 if (kUseBakerReadBarrier) {
5072 // Load with fast path based Baker's read barrier.
5073 Register temp_reg = RegisterFrom(maybe_temp, type);
5074 // /* HeapReference<Object> */ out = *(obj + offset)
5075 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5076 out,
5077 obj_reg,
5078 offset,
5079 temp_reg,
5080 /* needs_null_check */ false,
5081 /* use_load_acquire */ false);
5082 } else {
5083 // Load with slow path based read barrier.
5084 // /* HeapReference<Object> */ out = *(obj + offset)
5085 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5086 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5087 }
5088 } else {
5089 // Plain load with no read barrier.
5090 // /* HeapReference<Object> */ out = *(obj + offset)
5091 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5092 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5093 }
5094}
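
// Note on the two helpers above: GenerateReferenceLoadOneRegister reloads `out` from
// `*(out + offset)`, clobbering the base reference (which is why the non-Baker slow path
// first saves it into `maybe_temp`), whereas GenerateReferenceLoadTwoRegisters loads
// `*(obj + offset)` into a separate `out` register and leaves `obj` untouched.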
5095
5096void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
5097 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005098 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005099 uint32_t offset,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005100 vixl::aarch64::Label* fixup_label,
5101 bool requires_read_barrier) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005102 DCHECK(fixup_label == nullptr || offset == 0u);
Roland Levillain44015862016-01-22 11:47:17 +00005103 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005104 if (requires_read_barrier) {
5105 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005106 if (kUseBakerReadBarrier) {
5107 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
 5108 // Baker's read barriers are used:
5109 //
5110 // root = obj.field;
5111 // if (Thread::Current()->GetIsGcMarking()) {
5112 // root = ReadBarrier::Mark(root)
5113 // }
5114
5115 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005116 if (fixup_label == nullptr) {
5117 __ Ldr(root_reg, MemOperand(obj, offset));
5118 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005119 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005120 }
Roland Levillain44015862016-01-22 11:47:17 +00005121 static_assert(
5122 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5123 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5124 "have different sizes.");
5125 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5126 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5127 "have different sizes.");
5128
Vladimir Marko953437b2016-08-24 08:30:46 +00005129 // Slow path marking the GC root `root`.
Roland Levillain44015862016-01-22 11:47:17 +00005130 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005131 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
Roland Levillain44015862016-01-22 11:47:17 +00005132 codegen_->AddSlowPath(slow_path);
5133
5134 MacroAssembler* masm = GetVIXLAssembler();
5135 UseScratchRegisterScope temps(masm);
5136 Register temp = temps.AcquireW();
5137 // temp = Thread::Current()->GetIsGcMarking()
Andreas Gampe542451c2016-07-26 09:02:02 -07005138 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00005139 __ Cbnz(temp, slow_path->GetEntryLabel());
5140 __ Bind(slow_path->GetExitLabel());
5141 } else {
5142 // GC root loaded through a slow path for read barriers other
5143 // than Baker's.
5144 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005145 if (fixup_label == nullptr) {
5146 __ Add(root_reg.X(), obj.X(), offset);
5147 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005148 codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005149 }
Roland Levillain44015862016-01-22 11:47:17 +00005150 // /* mirror::Object* */ root = root->Read()
5151 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5152 }
5153 } else {
5154 // Plain GC root load with no read barrier.
5155 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005156 if (fixup_label == nullptr) {
5157 __ Ldr(root_reg, MemOperand(obj, offset));
5158 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005159 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005160 }
Roland Levillain44015862016-01-22 11:47:17 +00005161 // Note that GC roots are not affected by heap poisoning, thus we
5162 // do not have to unpoison `root_reg` here.
5163 }
5164}
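
// Illustrative sketch (not part of the generated code itself): with Baker read barriers the
// fast path for the GC root load above comes down to roughly (register names are examples only):
//
//   ldr  w_root, [x_obj, #offset]              // or via the pc-relative fixup placeholder
//   ldr  w_tmp, [tr, #is_gc_marking_offset]    // Thread::Current()->GetIsGcMarking()
//   cbnz w_tmp, mark_slow_path                 // slow path calls ReadBarrier::Mark(root)
// exit:
//
// so the common (GC-not-marking) case pays one extra load and one not-taken branch.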
5165
5166void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5167 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005168 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005169 uint32_t offset,
5170 Register temp,
5171 bool needs_null_check,
5172 bool use_load_acquire) {
5173 DCHECK(kEmitCompilerReadBarrier);
5174 DCHECK(kUseBakerReadBarrier);
5175
5176 // /* HeapReference<Object> */ ref = *(obj + offset)
5177 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005178 size_t no_scale_factor = 0U;
5179 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5180 ref,
5181 obj,
5182 offset,
5183 no_index,
5184 no_scale_factor,
5185 temp,
5186 needs_null_check,
5187 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005188}
5189
5190void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5191 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005192 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005193 uint32_t data_offset,
5194 Location index,
5195 Register temp,
5196 bool needs_null_check) {
5197 DCHECK(kEmitCompilerReadBarrier);
5198 DCHECK(kUseBakerReadBarrier);
5199
5200 // Array cells are never volatile variables, therefore array loads
5201 // never use Load-Acquire instructions on ARM64.
5202 const bool use_load_acquire = false;
5203
Roland Levillainbfea3352016-06-23 13:48:47 +01005204 static_assert(
5205 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5206 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005207 // /* HeapReference<Object> */ ref =
5208 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005209 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5210 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5211 ref,
5212 obj,
5213 data_offset,
5214 index,
5215 scale_factor,
5216 temp,
5217 needs_null_check,
5218 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005219}
5220
5221void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5222 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005223 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005224 uint32_t offset,
5225 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005226 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005227 Register temp,
5228 bool needs_null_check,
5229 bool use_load_acquire) {
5230 DCHECK(kEmitCompilerReadBarrier);
5231 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005232 // If we are emitting an array load, we should not be using a
5233 // Load Acquire instruction. In other words:
5234 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5235 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005236
5237 MacroAssembler* masm = GetVIXLAssembler();
5238 UseScratchRegisterScope temps(masm);
5239
5240 // In slow path based read barriers, the read barrier call is
5241 // inserted after the original load. However, in fast path based
5242 // Baker's read barriers, we need to perform the load of
5243 // mirror::Object::monitor_ *before* the original reference load.
5244 // This load-load ordering is required by the read barrier.
5245 // The fast path/slow path (for Baker's algorithm) should look like:
5246 //
5247 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5248 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5249 // HeapReference<Object> ref = *src; // Original reference load.
5250 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
5251 // if (is_gray) {
5252 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5253 // }
5254 //
5255 // Note: the original implementation in ReadBarrier::Barrier is
5256 // slightly more complex as it performs additional checks that we do
5257 // not do here for performance reasons.
5258
5259 Primitive::Type type = Primitive::kPrimNot;
5260 Register ref_reg = RegisterFrom(ref, type);
5261 DCHECK(obj.IsW());
5262 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5263
5264 // /* int32_t */ monitor = obj->monitor_
5265 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5266 if (needs_null_check) {
5267 MaybeRecordImplicitNullCheck(instruction);
5268 }
5269 // /* LockWord */ lock_word = LockWord(monitor)
5270 static_assert(sizeof(LockWord) == sizeof(int32_t),
5271 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005272
Vladimir Marko877a0332016-07-11 19:30:56 +01005273 // Introduce a dependency on the lock_word including rb_state,
5274 // to prevent load-load reordering, and without using
Roland Levillain44015862016-01-22 11:47:17 +00005275 // a memory barrier (which would be more expensive).
Roland Levillain0b671c02016-08-19 12:02:34 +01005276 // `obj` is unchanged by this operation, but its value now depends
5277 // on `temp`.
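// The shifted-out value is guaranteed to be zero: `temp` was written by a 32-bit load,
// which zero-extends into its 64-bit view, so `temp.X() LSR #32` is 0 and the Add below
// does not change the value of `obj`; it only makes the following reference load
// address-dependent on the lock word load.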
Vladimir Marko877a0332016-07-11 19:30:56 +01005278 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
Roland Levillain44015862016-01-22 11:47:17 +00005279
5280 // The actual reference load.
5281 if (index.IsValid()) {
Roland Levillainbfea3352016-06-23 13:48:47 +01005282 // Load types involving an "index".
5283 if (use_load_acquire) {
5284 // UnsafeGetObjectVolatile intrinsic case.
5285 // Register `index` is not an index in an object array, but an
5286 // offset to an object reference field within object `obj`.
5287 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5288 DCHECK(instruction->GetLocations()->Intrinsified());
5289 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5290 << instruction->AsInvoke()->GetIntrinsic();
5291 DCHECK_EQ(offset, 0U);
5292 DCHECK_EQ(scale_factor, 0U);
 5293 DCHECK(!needs_null_check);
5294 // /* HeapReference<Object> */ ref = *(obj + index)
5295 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5296 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005297 } else {
Roland Levillainbfea3352016-06-23 13:48:47 +01005298 // ArrayGet and UnsafeGetObject intrinsics cases.
5299 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5300 if (index.IsConstant()) {
5301 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5302 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5303 } else {
Vladimir Marko877a0332016-07-11 19:30:56 +01005304 Register temp2 = temps.AcquireW();
Roland Levillainbfea3352016-06-23 13:48:47 +01005305 __ Add(temp2, obj, offset);
5306 Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
5307 temps.Release(temp2);
5308 }
Roland Levillain44015862016-01-22 11:47:17 +00005309 }
Roland Levillain44015862016-01-22 11:47:17 +00005310 } else {
5311 // /* HeapReference<Object> */ ref = *(obj + offset)
5312 MemOperand field = HeapOperand(obj, offset);
5313 if (use_load_acquire) {
5314 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5315 } else {
5316 Load(type, ref_reg, field);
5317 }
5318 }
5319
5320 // Object* ref = ref_addr->AsMirrorPtr()
5321 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5322
Vladimir Marko953437b2016-08-24 08:30:46 +00005323 // Slow path marking the object `ref` when it is gray.
Roland Levillain44015862016-01-22 11:47:17 +00005324 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005325 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
Roland Levillain44015862016-01-22 11:47:17 +00005326 AddSlowPath(slow_path);
5327
5328 // if (rb_state == ReadBarrier::gray_ptr_)
5329 // ref = ReadBarrier::Mark(ref);
Vladimir Marko877a0332016-07-11 19:30:56 +01005330 // Given the numeric representation, it's enough to check the low bit of the rb_state.
5331 static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
5332 static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
5333 static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
5334 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005335 __ Bind(slow_path->GetExitLabel());
5336}
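
// Illustrative sketch (not part of the generated code itself): for a plain field load the
// Baker fast path above emits roughly (register names are examples only):
//
//   ldr  w_tmp, [x_obj, #monitor_offset]       // lock word, containing the rb_state bits
//   add  x_obj, x_obj, x_tmp, lsr #32          // value-preserving address dependency
//   ldr  w_ref, [x_obj, #offset]               // the original reference load
//   <maybe unpoison w_ref>
//   tbnz w_tmp, #kReadBarrierStateShift, mark_slow_path   // gray -> ReadBarrier::Mark(ref)
// exit: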
5337
5338void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5339 Location out,
5340 Location ref,
5341 Location obj,
5342 uint32_t offset,
5343 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005344 DCHECK(kEmitCompilerReadBarrier);
5345
Roland Levillain44015862016-01-22 11:47:17 +00005346 // Insert a slow path based read barrier *after* the reference load.
5347 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005348 // If heap poisoning is enabled, the unpoisoning of the loaded
5349 // reference will be carried out by the runtime within the slow
5350 // path.
5351 //
5352 // Note that `ref` currently does not get unpoisoned (when heap
5353 // poisoning is enabled), which is alright as the `ref` argument is
5354 // not used by the artReadBarrierSlow entry point.
5355 //
5356 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5357 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5358 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5359 AddSlowPath(slow_path);
5360
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005361 __ B(slow_path->GetEntryLabel());
5362 __ Bind(slow_path->GetExitLabel());
5363}
5364
Roland Levillain44015862016-01-22 11:47:17 +00005365void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5366 Location out,
5367 Location ref,
5368 Location obj,
5369 uint32_t offset,
5370 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005371 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005372 // Baker's read barriers shall be handled by the fast path
5373 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5374 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005375 // If heap poisoning is enabled, unpoisoning will be taken care of
5376 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005377 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005378 } else if (kPoisonHeapReferences) {
5379 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5380 }
5381}
5382
Roland Levillain44015862016-01-22 11:47:17 +00005383void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5384 Location out,
5385 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005386 DCHECK(kEmitCompilerReadBarrier);
5387
Roland Levillain44015862016-01-22 11:47:17 +00005388 // Insert a slow path based read barrier *after* the GC root load.
5389 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005390 // Note that GC roots are not affected by heap poisoning, so we do
5391 // not need to do anything special for this here.
5392 SlowPathCodeARM64* slow_path =
5393 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5394 AddSlowPath(slow_path);
5395
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005396 __ B(slow_path->GetEntryLabel());
5397 __ Bind(slow_path->GetExitLabel());
5398}
5399
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005400void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5401 LocationSummary* locations =
5402 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5403 locations->SetInAt(0, Location::RequiresRegister());
5404 locations->SetOut(Location::RequiresRegister());
5405}
5406
5407void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5408 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005409 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005410 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005411 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005412 __ Ldr(XRegisterFrom(locations->Out()),
5413 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005414 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005415 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005416 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005417 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5418 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005419 __ Ldr(XRegisterFrom(locations->Out()),
5420 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005421 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005422}
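
// Note: the vtable case above needs a single load, from the embedded vtable entry at
// `cls + EmbeddedVTableEntryOffset(index)`, while the interface (IMT) case needs two loads:
// first the ImTable pointer at `cls + ImtPtrOffset`, then the ArtMethod* at
// `OffsetOfElement(index)` within that table.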
5423
5424
5425
Alexandre Rames67555f72014-11-18 10:55:16 +00005426#undef __
5427#undef QUICK_ENTRY_POINT
5428
Alexandre Rames5319def2014-10-23 10:03:10 +01005429} // namespace arm64
5430} // namespace art