/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
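// A rough sanity check on the threshold value, using the estimates from the comment above: with
// num_entries = 7, the compare/jump sequence costs about 1.5 * 7 + 3 ≈ 14 instructions, while the
// jump table costs 7 instructions plus 7 32-bit literals, i.e. roughly 14 words of code/data.
// The two strategies break even around this point, so the jump table only starts paying off for
// larger switches.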

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
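
// A short note on the gt_bias handling above (a summary of the intent rather than a quote from
// the architecture manual): with gt_bias, an unordered comparison (a NaN operand) must make
// conditions such as kCondLT and kCondLE evaluate to false, so the unordered-false codes (cc, ls)
// are chosen; without gt_bias the unordered case must evaluate to true, so the unordered-true
// codes (lt, le) are used instead. The /* unordered */ annotations mark the alternative that is
// taken when the comparison is unordered.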

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory-access operands used to save/restore the live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the spill base address of the floating point registers).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    arm64_codegen->InvokeRuntime(entry_point_offset, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
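
// A sketch of the emitted table layout (illustrative only, with hypothetical label names): after
// `table_start_` the assembler places one 32-bit signed literal per switch entry, each holding
// the byte offset from the start of the table to the corresponding successor block, e.g.
//
//   table_start_: .word (label_case_0 - table_start_)
//                 .word (label_case_1 - table_start_)
//                 ...
//
// The dispatching code (not shown in this excerpt) then loads the entry for the selected case
// and adds it to the table's address to form the branch target.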

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair; the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4 and
  // 5 VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available as VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
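
// Illustrative prologue for a hypothetical method (not taken from a real compilation): assume
// frame_size == 64 with x20, x21 and lr as the preserved core registers and no FP spills. The
// code above would then emit roughly:
//
//   sub x16, sp, #<stack overflow reserved bytes>   // implicit stack overflow check
//   ldr wzr, [x16]
//   str x0, [sp, #-64]!       // store the ArtMethod* (x0) and allocate the frame in one step
//   stp x20, x21, [sp, #40]   // preserved core registers live at the top of the frame,
//   str lr, [sp, #56]         // with lr at sp[frame_size - 8] as in the layout comment above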
1044
1045void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001046 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001047 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001048 if (!HasEmptyFrame()) {
1049 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001050 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1051 frame_size - FrameEntrySpillSize());
1052 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1053 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001054 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001055 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001056 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001057 __ Ret();
1058 GetAssembler()->cfi().RestoreState();
1059 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001060}
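// The cfi RememberState/RestoreState pair above brackets the epilogue: a method
// may still have code emitted after this return (other basic blocks), so the
// unwind information is rolled back to describe the still-live frame once the
// Ret has been emitted.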
1061
Scott Wakeling97c72b72016-06-24 16:19:36 +01001062CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001063 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001064 return CPURegList(CPURegister::kRegister, kXRegSize,
1065 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001066}
1067
Scott Wakeling97c72b72016-06-24 16:19:36 +01001068CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001069 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1070 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001071 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1072 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001073}
1074
Alexandre Rames5319def2014-10-23 10:03:10 +01001075void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1076 __ Bind(GetLabelOf(block));
1077}
1078
Calin Juravle175dc732015-08-25 15:42:32 +01001079void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1080 DCHECK(location.IsRegister());
1081 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1082}
1083
Calin Juravlee460d1d2015-09-29 04:52:17 +01001084void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1085 if (location.IsRegister()) {
1086 locations->AddTemp(location);
1087 } else {
1088 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1089 }
1090}
1091
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001092void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001093 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001094 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001095 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001096 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001097 if (value_can_be_null) {
1098 __ Cbz(value, &done);
1099 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001100 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
1101 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001102 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001103 if (value_can_be_null) {
1104 __ Bind(&done);
1105 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001106}
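// Card marking above computes the card index as (object >> kCardShift) and
// stores the low byte of the card-table base register into that card. This
// works because the runtime biases the card-table base so that its least
// significant byte equals the dirty-card value, saving an extra register here.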
1107
David Brazdil58282f42016-01-14 12:45:10 +00001108void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001109 // Blocked core registers:
1110 // lr : Runtime reserved.
1111 // tr : Runtime reserved.
1112 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1113 // ip1 : VIXL core temp.
1114 // ip0 : VIXL core temp.
1115 //
1116 // Blocked fp registers:
1117 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001118 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1119 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001120 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001121 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001122 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001123
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001124 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001125 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001126 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001127 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001128
David Brazdil58282f42016-01-14 12:45:10 +00001129 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001130 // Stubs do not save callee-save floating point registers. If the graph
1131 // is debuggable, we need to deal with these registers differently. For
1132 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001133 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1134 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001135 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001136 }
1137 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001138}
1139
Alexandre Rames3e69f162014-12-10 10:36:50 +00001140size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1141 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1142 __ Str(reg, MemOperand(sp, stack_index));
1143 return kArm64WordSize;
1144}
1145
1146size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1147 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1148 __ Ldr(reg, MemOperand(sp, stack_index));
1149 return kArm64WordSize;
1150}
1151
1152size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1153 FPRegister reg = FPRegister(reg_id, kDRegSize);
1154 __ Str(reg, MemOperand(sp, stack_index));
1155 return kArm64WordSize;
1156}
1157
1158size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1159 FPRegister reg = FPRegister(reg_id, kDRegSize);
1160 __ Ldr(reg, MemOperand(sp, stack_index));
1161 return kArm64WordSize;
1162}
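// These four helpers are used when slow paths save and restore live registers
// around runtime calls. Every register, core or FP, is spilled as a full
// 64-bit slot (kXRegSize / kDRegSize), hence the constant kArm64WordSize step.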
1163
Alexandre Rames5319def2014-10-23 10:03:10 +01001164void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001165 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001166}
1167
1168void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001169 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001170}
1171
Alexandre Rames67555f72014-11-18 10:55:16 +00001172void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001173 if (constant->IsIntConstant()) {
1174 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1175 } else if (constant->IsLongConstant()) {
1176 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1177 } else if (constant->IsNullConstant()) {
1178 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001179 } else if (constant->IsFloatConstant()) {
1180 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1181 } else {
1182 DCHECK(constant->IsDoubleConstant());
1183 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1184 }
1185}
1186
Alexandre Rames3e69f162014-12-10 10:36:50 +00001187
1188static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1189 DCHECK(constant.IsConstant());
1190 HConstant* cst = constant.GetConstant();
1191 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001192 // Null is mapped to a core W register, which we associate with kPrimInt.
1193 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001194 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1195 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1196 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1197}
1198
Calin Juravlee460d1d2015-09-29 04:52:17 +01001199void CodeGeneratorARM64::MoveLocation(Location destination,
1200 Location source,
1201 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001202 if (source.Equals(destination)) {
1203 return;
1204 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001205
1206 // A valid move can always be inferred from the destination and source
1207 // locations. When moving from and to a register, the argument type can be
1208 // used to generate 32bit instead of 64bit moves. In debug mode we also
1209 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001210 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001211
1212 if (destination.IsRegister() || destination.IsFpuRegister()) {
1213 if (unspecified_type) {
1214 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1215 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001216 (src_cst != nullptr && (src_cst->IsIntConstant()
1217 || src_cst->IsFloatConstant()
1218 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001219 // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001220 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001221 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001222 // If the source is a double stack slot or a 64bit constant, a 64bit
1223 // type is appropriate. Else the source is a register, and since the
1224 // type has not been specified, we choose a 64bit type to force a 64bit
1225 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001226 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001227 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001228 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001229 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1230 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1231 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001232 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1233 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1234 __ Ldr(dst, StackOperandFrom(source));
1235 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001236 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001237 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001238 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001239 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001240 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001241 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001242 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001243 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1244 ? Primitive::kPrimLong
1245 : Primitive::kPrimInt;
1246 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1247 }
1248 } else {
1249 DCHECK(source.IsFpuRegister());
1250 if (destination.IsRegister()) {
1251 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1252 ? Primitive::kPrimDouble
1253 : Primitive::kPrimFloat;
1254 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1255 } else {
1256 DCHECK(destination.IsFpuRegister());
1257 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001258 }
1259 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001260 } else { // The destination is not a register. It must be a stack slot.
1261 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1262 if (source.IsRegister() || source.IsFpuRegister()) {
1263 if (unspecified_type) {
1264 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001265 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001266 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001267 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001268 }
1269 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001270 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1271 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1272 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001273 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001274 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1275 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001276 UseScratchRegisterScope temps(GetVIXLAssembler());
1277 HConstant* src_cst = source.GetConstant();
1278 CPURegister temp;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001279 if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001280 temp = temps.AcquireW();
1281 } else if (src_cst->IsLongConstant()) {
1282 temp = temps.AcquireX();
1283 } else if (src_cst->IsFloatConstant()) {
1284 temp = temps.AcquireS();
1285 } else {
1286 DCHECK(src_cst->IsDoubleConstant());
1287 temp = temps.AcquireD();
1288 }
1289 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001290 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001291 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001292 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001293 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001294 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001295 // There is generally less pressure on FP registers.
1296 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001297 __ Ldr(temp, StackOperandFrom(source));
1298 __ Str(temp, StackOperandFrom(destination));
1299 }
1300 }
1301}
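// In short, MoveLocation dispatches as follows: constants are materialized via
// MoveConstant (directly, or through a scratch register for stack destinations),
// register<->register moves use Mov/Fmov, register<->stack moves use Ldr/Str
// with a type-sized register view, and stack<->stack moves go through a scratch
// FP register (S or D) since FP registers are generally under less pressure.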
1302
1303void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001304 CPURegister dst,
1305 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001306 switch (type) {
1307 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001308 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001309 break;
1310 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001311 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001312 break;
1313 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001314 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001315 break;
1316 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001317 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001318 break;
1319 case Primitive::kPrimInt:
1320 case Primitive::kPrimNot:
1321 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001322 case Primitive::kPrimFloat:
1323 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001324 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001325 __ Ldr(dst, src);
1326 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001327 case Primitive::kPrimVoid:
1328 LOG(FATAL) << "Unreachable type " << type;
1329 }
1330}
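// The sub-word cases above encode Java widening semantics: boolean and char are
// zero-extended (Ldrb/Ldrh) while byte and short are sign-extended
// (Ldrsb/Ldrsh); int/long/float/double use a plain Ldr of the matching width.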
1331
Calin Juravle77520bc2015-01-12 18:45:46 +00001332void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001333 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001334 const MemOperand& src,
1335 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001336 MacroAssembler* masm = GetVIXLAssembler();
1337 BlockPoolsScope block_pools(masm);
1338 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001339 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001340 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001341
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001342 DCHECK(!src.IsPreIndex());
1343 DCHECK(!src.IsPostIndex());
1344
1345 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001346 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001347 MemOperand base = MemOperand(temp_base);
1348 switch (type) {
1349 case Primitive::kPrimBoolean:
1350 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001351 if (needs_null_check) {
1352 MaybeRecordImplicitNullCheck(instruction);
1353 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001354 break;
1355 case Primitive::kPrimByte:
1356 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001357 if (needs_null_check) {
1358 MaybeRecordImplicitNullCheck(instruction);
1359 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001360 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1361 break;
1362 case Primitive::kPrimChar:
1363 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001364 if (needs_null_check) {
1365 MaybeRecordImplicitNullCheck(instruction);
1366 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001367 break;
1368 case Primitive::kPrimShort:
1369 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001370 if (needs_null_check) {
1371 MaybeRecordImplicitNullCheck(instruction);
1372 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001373 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1374 break;
1375 case Primitive::kPrimInt:
1376 case Primitive::kPrimNot:
1377 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001378 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001379 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001380 if (needs_null_check) {
1381 MaybeRecordImplicitNullCheck(instruction);
1382 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001383 break;
1384 case Primitive::kPrimFloat:
1385 case Primitive::kPrimDouble: {
1386 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001387 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001388
1389 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1390 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001391 if (needs_null_check) {
1392 MaybeRecordImplicitNullCheck(instruction);
1393 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001394 __ Fmov(FPRegister(dst), temp);
1395 break;
1396 }
1397 case Primitive::kPrimVoid:
1398 LOG(FATAL) << "Unreachable type " << type;
1399 }
1400}
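// LoadAcquire mirrors Load but with acquire ordering. Ldar/Ldarb/Ldarh only
// accept a bare base register, so the effective address is first materialized
// into temp_base. Sub-word signed types need an explicit Sbfx afterwards, and
// FP values are loaded through a core temp plus Fmov because there is no
// FP load-acquire instruction.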
1401
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001402void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001403 CPURegister src,
1404 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001405 switch (type) {
1406 case Primitive::kPrimBoolean:
1407 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001408 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001409 break;
1410 case Primitive::kPrimChar:
1411 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001412 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001413 break;
1414 case Primitive::kPrimInt:
1415 case Primitive::kPrimNot:
1416 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001417 case Primitive::kPrimFloat:
1418 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001419 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001420 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001421 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001422 case Primitive::kPrimVoid:
1423 LOG(FATAL) << "Unreachable type " << type;
1424 }
1425}
1426
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001427void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1428 CPURegister src,
1429 const MemOperand& dst) {
1430 UseScratchRegisterScope temps(GetVIXLAssembler());
1431 Register temp_base = temps.AcquireX();
1432
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001433 DCHECK(!dst.IsPreIndex());
1434 DCHECK(!dst.IsPostIndex());
1435
1436 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001437 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001438 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001439 MemOperand base = MemOperand(temp_base);
1440 switch (type) {
1441 case Primitive::kPrimBoolean:
1442 case Primitive::kPrimByte:
1443 __ Stlrb(Register(src), base);
1444 break;
1445 case Primitive::kPrimChar:
1446 case Primitive::kPrimShort:
1447 __ Stlrh(Register(src), base);
1448 break;
1449 case Primitive::kPrimInt:
1450 case Primitive::kPrimNot:
1451 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001452 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001453 __ Stlr(Register(src), base);
1454 break;
1455 case Primitive::kPrimFloat:
1456 case Primitive::kPrimDouble: {
1457 DCHECK(src.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001458 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001459
1460 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1461 __ Fmov(temp, FPRegister(src));
1462 __ Stlr(temp, base);
1463 break;
1464 }
1465 case Primitive::kPrimVoid:
1466 LOG(FATAL) << "Unreachable type " << type;
1467 }
1468}
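// StoreRelease is the symmetric case: Stlr/Stlrb/Stlrh also require a bare base
// register, and FP values are first moved to a core temp with Fmov because
// there is no FP store-release instruction.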
1469
Calin Juravle175dc732015-08-25 15:42:32 +01001470void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1471 HInstruction* instruction,
1472 uint32_t dex_pc,
1473 SlowPathCode* slow_path) {
1474 InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
1475 instruction,
1476 dex_pc,
1477 slow_path);
1478}
1479
Alexandre Rames67555f72014-11-18 10:55:16 +00001480void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1481 HInstruction* instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001482 uint32_t dex_pc,
1483 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001484 ValidateInvokeRuntime(instruction, slow_path);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001485 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames67555f72014-11-18 10:55:16 +00001486 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1487 __ Blr(lr);
Roland Levillain896e32d2015-05-05 18:07:10 +01001488 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00001489}
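// Runtime calls load the entry point from the Thread object through the
// reserved thread register (tr), branch with Blr, and then record pc info so
// the runtime can map the return address back to a dex pc and stack map.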
1490
1491void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001492 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001493 UseScratchRegisterScope temps(GetVIXLAssembler());
1494 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001495 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1496
Serban Constantinescu02164b32014-11-13 14:05:07 +00001497 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001498 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1499 __ Add(temp, class_reg, status_offset);
1500 __ Ldar(temp, HeapOperand(temp));
1501 __ Cmp(temp, mirror::Class::kStatusInitialized);
1502 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001503 __ Bind(slow_path->GetExitLabel());
1504}
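// The class status is loaded with acquire semantics (Ldar) so that, once a
// class is observed as at least kStatusInitialized, the stores made by its
// initializer are also visible; any lower status takes the slow path.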
Alexandre Rames5319def2014-10-23 10:03:10 +01001505
Roland Levillain44015862016-01-22 11:47:17 +00001506void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001507 BarrierType type = BarrierAll;
1508
1509 switch (kind) {
1510 case MemBarrierKind::kAnyAny:
1511 case MemBarrierKind::kAnyStore: {
1512 type = BarrierAll;
1513 break;
1514 }
1515 case MemBarrierKind::kLoadAny: {
1516 type = BarrierReads;
1517 break;
1518 }
1519 case MemBarrierKind::kStoreStore: {
1520 type = BarrierWrites;
1521 break;
1522 }
1523 default:
1524 LOG(FATAL) << "Unexpected memory barrier " << kind;
1525 }
1526 __ Dmb(InnerShareable, type);
1527}
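// With VIXL this maps roughly to: kAnyAny/kAnyStore -> dmb ish,
// kLoadAny -> dmb ishld, and kStoreStore -> dmb ishst, all inner-shareable.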
1528
Serban Constantinescu02164b32014-11-13 14:05:07 +00001529void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1530 HBasicBlock* successor) {
1531 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001532 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1533 if (slow_path == nullptr) {
1534 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1535 instruction->SetSlowPath(slow_path);
1536 codegen_->AddSlowPath(slow_path);
1537 if (successor != nullptr) {
1538 DCHECK(successor->IsLoopHeader());
1539 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1540 }
1541 } else {
1542 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1543 }
1544
Serban Constantinescu02164b32014-11-13 14:05:07 +00001545 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1546 Register temp = temps.AcquireW();
1547
1548 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1549 if (successor == nullptr) {
1550 __ Cbnz(temp, slow_path->GetEntryLabel());
1551 __ Bind(slow_path->GetReturnLabel());
1552 } else {
1553 __ Cbz(temp, codegen_->GetLabelOf(successor));
1554 __ B(slow_path->GetEntryLabel());
1555 // slow_path will return to GetLabelOf(successor).
1556 }
1557}
1558
Alexandre Rames5319def2014-10-23 10:03:10 +01001559InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1560 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001561 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001562 assembler_(codegen->GetAssembler()),
1563 codegen_(codegen) {}
1564
1565#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001566 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001567
1568#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1569
1570enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001571 // Using a base helps identify when we hit such breakpoints.
1572 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001573#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1574 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1575#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1576};
1577
1578#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001579 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001580 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1581 } \
1582 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1583 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1584 locations->SetOut(Location::Any()); \
1585 }
1586 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1587#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1588
1589#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001590#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001591
Alexandre Rames67555f72014-11-18 10:55:16 +00001592void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001593 DCHECK_EQ(instr->InputCount(), 2U);
1594 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1595 Primitive::Type type = instr->GetResultType();
1596 switch (type) {
1597 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001598 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001599 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001600 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001601 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001602 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001603
1604 case Primitive::kPrimFloat:
1605 case Primitive::kPrimDouble:
1606 locations->SetInAt(0, Location::RequiresFpuRegister());
1607 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001608 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001609 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001610
Alexandre Rames5319def2014-10-23 10:03:10 +01001611 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001612 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001613 }
1614}
1615
Alexandre Rames09a99962015-04-15 11:47:56 +01001616void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001617 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1618
1619 bool object_field_get_with_read_barrier =
1620 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001621 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001622 new (GetGraph()->GetArena()) LocationSummary(instruction,
1623 object_field_get_with_read_barrier ?
1624 LocationSummary::kCallOnSlowPath :
1625 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001626 locations->SetInAt(0, Location::RequiresRegister());
1627 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1628 locations->SetOut(Location::RequiresFpuRegister());
1629 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001630 // The output overlaps for an object field get when read barriers
1631 // are enabled: we do not want the load to overwrite the object's
1632 // location, as we need it to emit the read barrier.
1633 locations->SetOut(
1634 Location::RequiresRegister(),
1635 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001636 }
1637}
1638
1639void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1640 const FieldInfo& field_info) {
1641 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001642 LocationSummary* locations = instruction->GetLocations();
1643 Location base_loc = locations->InAt(0);
1644 Location out = locations->Out();
1645 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001646 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001647 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001648 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001649
Roland Levillain44015862016-01-22 11:47:17 +00001650 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1651 // Object FieldGet with Baker's read barrier case.
1652 MacroAssembler* masm = GetVIXLAssembler();
1653 UseScratchRegisterScope temps(masm);
1654 // /* HeapReference<Object> */ out = *(base + offset)
1655 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1656 Register temp = temps.AcquireW();
1657 // Note that potential implicit null checks are handled in this
1658 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1659 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1660 instruction,
1661 out,
1662 base,
1663 offset,
1664 temp,
1665 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001666 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001667 } else {
1668 // General case.
1669 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001670 // Note that a potential implicit null check is handled in this
1671 // CodeGeneratorARM64::LoadAcquire call.
1672 // NB: LoadAcquire will record the pc info if needed.
1673 codegen_->LoadAcquire(
1674 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001675 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001676 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001677 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001678 }
Roland Levillain44015862016-01-22 11:47:17 +00001679 if (field_type == Primitive::kPrimNot) {
1680 // If read barriers are enabled, emit read barriers other than
1681 // Baker's using a slow path (and also unpoison the loaded
1682 // reference, if heap poisoning is enabled).
1683 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1684 }
Roland Levillain4d027112015-07-01 15:41:14 +01001685 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001686}
1687
1688void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1689 LocationSummary* locations =
1690 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1691 locations->SetInAt(0, Location::RequiresRegister());
1692 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1693 locations->SetInAt(1, Location::RequiresFpuRegister());
1694 } else {
1695 locations->SetInAt(1, Location::RequiresRegister());
1696 }
1697}
1698
1699void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001700 const FieldInfo& field_info,
1701 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001702 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001703 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001704
1705 Register obj = InputRegisterAt(instruction, 0);
1706 CPURegister value = InputCPURegisterAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001707 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001708 Offset offset = field_info.GetFieldOffset();
1709 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001710
Roland Levillain4d027112015-07-01 15:41:14 +01001711 {
1712 // We use a block to end the scratch scope before the write barrier, thus
1713 // freeing the temporary registers so they can be used in `MarkGCCard`.
1714 UseScratchRegisterScope temps(GetVIXLAssembler());
1715
1716 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1717 DCHECK(value.IsW());
1718 Register temp = temps.AcquireW();
1719 __ Mov(temp, value.W());
1720 GetAssembler()->PoisonHeapReference(temp.W());
1721 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001722 }
Roland Levillain4d027112015-07-01 15:41:14 +01001723
1724 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001725 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1726 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001727 } else {
1728 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1729 codegen_->MaybeRecordImplicitNullCheck(instruction);
1730 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001731 }
1732
1733 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001734 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001735 }
1736}
1737
Alexandre Rames67555f72014-11-18 10:55:16 +00001738void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001739 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001740
1741 switch (type) {
1742 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001743 case Primitive::kPrimLong: {
1744 Register dst = OutputRegister(instr);
1745 Register lhs = InputRegisterAt(instr, 0);
1746 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001747 if (instr->IsAdd()) {
1748 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001749 } else if (instr->IsAnd()) {
1750 __ And(dst, lhs, rhs);
1751 } else if (instr->IsOr()) {
1752 __ Orr(dst, lhs, rhs);
1753 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001754 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001755 } else if (instr->IsRor()) {
1756 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001757 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001758 __ Ror(dst, lhs, shift);
1759 } else {
1760 // Ensure shift distance is in the same size register as the result. If
1761 // we are rotating a long and the shift comes in a w register originally,
1762 // we don't need to sxtw for use as an x since the shift distances are
1763 // all implicitly masked with (reg_bits - 1).
1764 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1765 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001766 } else {
1767 DCHECK(instr->IsXor());
1768 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001769 }
1770 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001771 }
1772 case Primitive::kPrimFloat:
1773 case Primitive::kPrimDouble: {
1774 FPRegister dst = OutputFPRegister(instr);
1775 FPRegister lhs = InputFPRegisterAt(instr, 0);
1776 FPRegister rhs = InputFPRegisterAt(instr, 1);
1777 if (instr->IsAdd()) {
1778 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001779 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001780 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001781 } else {
1782 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001783 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001784 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001785 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001786 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001787 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001788 }
1789}
1790
Serban Constantinescu02164b32014-11-13 14:05:07 +00001791void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1792 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1793
1794 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1795 Primitive::Type type = instr->GetResultType();
1796 switch (type) {
1797 case Primitive::kPrimInt:
1798 case Primitive::kPrimLong: {
1799 locations->SetInAt(0, Location::RequiresRegister());
1800 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1801 locations->SetOut(Location::RequiresRegister());
1802 break;
1803 }
1804 default:
1805 LOG(FATAL) << "Unexpected shift type " << type;
1806 }
1807}
1808
1809void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1810 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1811
1812 Primitive::Type type = instr->GetType();
1813 switch (type) {
1814 case Primitive::kPrimInt:
1815 case Primitive::kPrimLong: {
1816 Register dst = OutputRegister(instr);
1817 Register lhs = InputRegisterAt(instr, 0);
1818 Operand rhs = InputOperandAt(instr, 1);
1819 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001820 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001821 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001822 if (instr->IsShl()) {
1823 __ Lsl(dst, lhs, shift_value);
1824 } else if (instr->IsShr()) {
1825 __ Asr(dst, lhs, shift_value);
1826 } else {
1827 __ Lsr(dst, lhs, shift_value);
1828 }
1829 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001830 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001831
1832 if (instr->IsShl()) {
1833 __ Lsl(dst, lhs, rhs_reg);
1834 } else if (instr->IsShr()) {
1835 __ Asr(dst, lhs, rhs_reg);
1836 } else {
1837 __ Lsr(dst, lhs, rhs_reg);
1838 }
1839 }
1840 break;
1841 }
1842 default:
1843 LOG(FATAL) << "Unexpected shift operation type " << type;
1844 }
1845}
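// Immediate shift distances are masked to 31 or 63 (kMaxIntShiftDistance /
// kMaxLongShiftDistance) as required by Java semantics; for register shifts the
// hardware variants already take the amount modulo the register width, so no
// explicit masking needs to be emitted.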
1846
Alexandre Rames5319def2014-10-23 10:03:10 +01001847void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001848 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001849}
1850
1851void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001852 HandleBinaryOp(instruction);
1853}
1854
1855void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1856 HandleBinaryOp(instruction);
1857}
1858
1859void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1860 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001861}
1862
Artem Serov7fc63502016-02-09 17:15:29 +00001863void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001864 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1865 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1866 locations->SetInAt(0, Location::RequiresRegister());
1867 // There is no immediate variant of negated bitwise instructions in AArch64.
1868 locations->SetInAt(1, Location::RequiresRegister());
1869 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1870}
1871
Artem Serov7fc63502016-02-09 17:15:29 +00001872void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001873 Register dst = OutputRegister(instr);
1874 Register lhs = InputRegisterAt(instr, 0);
1875 Register rhs = InputRegisterAt(instr, 1);
1876
1877 switch (instr->GetOpKind()) {
1878 case HInstruction::kAnd:
1879 __ Bic(dst, lhs, rhs);
1880 break;
1881 case HInstruction::kOr:
1882 __ Orn(dst, lhs, rhs);
1883 break;
1884 case HInstruction::kXor:
1885 __ Eon(dst, lhs, rhs);
1886 break;
1887 default:
1888 LOG(FATAL) << "Unreachable";
1889 }
1890}
1891
Alexandre Rames8626b742015-11-25 16:28:08 +00001892void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1893 HArm64DataProcWithShifterOp* instruction) {
1894 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1895 instruction->GetType() == Primitive::kPrimLong);
1896 LocationSummary* locations =
1897 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1898 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1899 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1900 } else {
1901 locations->SetInAt(0, Location::RequiresRegister());
1902 }
1903 locations->SetInAt(1, Location::RequiresRegister());
1904 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1905}
1906
1907void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1908 HArm64DataProcWithShifterOp* instruction) {
1909 Primitive::Type type = instruction->GetType();
1910 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1911 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1912 Register out = OutputRegister(instruction);
1913 Register left;
1914 if (kind != HInstruction::kNeg) {
1915 left = InputRegisterAt(instruction, 0);
1916 }
1917 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
1918 // shifter operand operation, the IR generating `right_reg` (input to the type
1919 // conversion) can have a different type from the current instruction's type,
1920 // so we manually indicate the type.
1921 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001922 int64_t shift_amount = instruction->GetShiftAmount() &
1923 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001924
1925 Operand right_operand(0);
1926
1927 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1928 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1929 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1930 } else {
1931 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1932 }
1933
1934 // Logical binary operations do not support extension operations in the
1935 // operand. Note that VIXL would still manage if such an operand were passed,
1936 // by generating the extension as a separate instruction.
1937 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1938 DCHECK(!right_operand.IsExtendedRegister() ||
1939 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1940 kind != HInstruction::kNeg));
1941 switch (kind) {
1942 case HInstruction::kAdd:
1943 __ Add(out, left, right_operand);
1944 break;
1945 case HInstruction::kAnd:
1946 __ And(out, left, right_operand);
1947 break;
1948 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001949 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001950 __ Neg(out, right_operand);
1951 break;
1952 case HInstruction::kOr:
1953 __ Orr(out, left, right_operand);
1954 break;
1955 case HInstruction::kSub:
1956 __ Sub(out, left, right_operand);
1957 break;
1958 case HInstruction::kXor:
1959 __ Eor(out, left, right_operand);
1960 break;
1961 default:
1962 LOG(FATAL) << "Unexpected operation kind: " << kind;
1963 UNREACHABLE();
1964 }
1965}
1966
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001967void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001968 // The read barrier instrumentation does not support the
1969 // HArm64IntermediateAddress instruction yet.
1970 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001971 LocationSummary* locations =
1972 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1973 locations->SetInAt(0, Location::RequiresRegister());
1974 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1975 locations->SetOut(Location::RequiresRegister());
1976}
1977
1978void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
1979 HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001980 // The read barrier instrumentation does not support the
1981 // HArm64IntermediateAddress instruction yet.
1982 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001983 __ Add(OutputRegister(instruction),
1984 InputRegisterAt(instruction, 0),
1985 Operand(InputOperandAt(instruction, 1)));
1986}
1987
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001988void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001989 LocationSummary* locations =
1990 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001991 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
1992 if (instr->GetOpKind() == HInstruction::kSub &&
1993 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00001994 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03001995 // Don't allocate register for Mneg instruction.
1996 } else {
1997 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
1998 Location::RequiresRegister());
1999 }
2000 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2001 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002002 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2003}
2004
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002005void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002006 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002007 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2008 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002009
2010 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2011 // This fixup should be carried out for all multiply-accumulate instructions:
2012 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2013 if (instr->GetType() == Primitive::kPrimLong &&
2014 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2015 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002016 vixl::aarch64::Instruction* prev =
2017 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002018 if (prev->IsLoadOrStore()) {
2019 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002020 vixl::aarch64::CodeBufferCheckScope scope(masm,
2021 kInstructionSize,
2022 vixl::aarch64::CodeBufferCheckScope::kCheck,
2023 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002024 __ nop();
2025 }
2026 }
2027
2028 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002029 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002030 __ Madd(res, mul_left, mul_right, accumulator);
2031 } else {
2032 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002033 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002034 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002035 __ Mneg(res, mul_left, mul_right);
2036 } else {
2037 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2038 __ Msub(res, mul_left, mul_right, accumulator);
2039 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002040 }
2041}
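// The nop above works around Cortex-A53 erratum 835769: on affected cores a
// 64-bit multiply-accumulate that directly follows a memory access can produce
// an incorrect result, so a single nop is inserted to separate the two
// instructions when the fixup is enabled for the target CPU.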
2042
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002043void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002044 bool object_array_get_with_read_barrier =
2045 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002046 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002047 new (GetGraph()->GetArena()) LocationSummary(instruction,
2048 object_array_get_with_read_barrier ?
2049 LocationSummary::kCallOnSlowPath :
2050 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002051 locations->SetInAt(0, Location::RequiresRegister());
2052 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002053 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2054 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2055 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002056 // The output overlaps in the case of an object array get with
2057 // read barriers enabled: we do not want the move to overwrite the
2058 // array's location, as we need it to emit the read barrier.
2059 locations->SetOut(
2060 Location::RequiresRegister(),
2061 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002062 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002063}
2064
2065void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002066 Primitive::Type type = instruction->GetType();
2067 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002068 LocationSummary* locations = instruction->GetLocations();
2069 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002070 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002071 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002072
Alexandre Ramesd921d642015-04-16 15:07:16 +01002073 MacroAssembler* masm = GetVIXLAssembler();
2074 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002075 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002076 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002077
Roland Levillain44015862016-01-22 11:47:17 +00002078 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2079 // Object ArrayGet with Baker's read barrier case.
2080 Register temp = temps.AcquireW();
2081 // The read barrier instrumentation does not support the
2082 // HArm64IntermediateAddress instruction yet.
2083 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2084 // Note that a potential implicit null check is handled in the
2085 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2086 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2087 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002088 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002089 // General case.
2090 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002091 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002092 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2093 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002094 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002095 Register temp = temps.AcquireSameSizeAs(obj);
2096 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2097 // The read barrier instrumentation does not support the
2098 // HArm64IntermediateAddress instruction yet.
2099 DCHECK(!kEmitCompilerReadBarrier);
2100 // We do not need to compute the intermediate address from the array: the
2101 // input instruction has done it already. See the comment in
2102 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2103 if (kIsDebugBuild) {
2104 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2105 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2106 }
2107 temp = obj;
2108 } else {
2109 __ Add(temp, obj, offset);
2110 }
2111 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2112 }
2113
2114 codegen_->Load(type, OutputCPURegister(instruction), source);
2115 codegen_->MaybeRecordImplicitNullCheck(instruction);
2116
2117 if (type == Primitive::kPrimNot) {
2118 static_assert(
2119 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2120 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2121 Location obj_loc = locations->InAt(0);
2122 if (index.IsConstant()) {
2123 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2124 } else {
2125 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2126 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002127 }
Roland Levillain4d027112015-07-01 15:41:14 +01002128 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002129}
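
// A rough sketch of the address computation emitted above for the general
// (non Baker read barrier) case, assuming an int[] (component size shift 2);
// illustration only, not generated code, with `data_offset` standing for the
// value returned by CodeGenerator::GetArrayDataOffset():
//
//   constant index:  Ldr w_out, [x_obj, #(data_offset + (index << 2))]
//   register index:  Add x_temp, x_obj, #data_offset
//                    Ldr w_out, [x_temp, x_index, LSL #2]
//
// When the array input is an HArm64IntermediateAddress, the Add has already
// been emitted by the instruction simplifier and `temp` simply aliases `obj`.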
2130
Alexandre Rames5319def2014-10-23 10:03:10 +01002131void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2132 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2133 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002134 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002135}
2136
2137void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002138 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002139 BlockPoolsScope block_pools(GetVIXLAssembler());
Vladimir Markodce016e2016-04-28 13:10:02 +01002140 __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002141 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002142}
2143
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002144void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002145 Primitive::Type value_type = instruction->GetComponentType();
2146
2147 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2148 bool object_array_set_with_read_barrier =
2149 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002150 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2151 instruction,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002152 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
2153 LocationSummary::kCallOnSlowPath :
2154 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002155 locations->SetInAt(0, Location::RequiresRegister());
2156 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002157 if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002158 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002159 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002160 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002161 }
2162}
2163
2164void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2165 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002166 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002167 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002168 bool needs_write_barrier =
2169 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002170
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002171 Register array = InputRegisterAt(instruction, 0);
2172 CPURegister value = InputCPURegisterAt(instruction, 2);
2173 CPURegister source = value;
2174 Location index = locations->InAt(1);
2175 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2176 MemOperand destination = HeapOperand(array);
2177 MacroAssembler* masm = GetVIXLAssembler();
2178 BlockPoolsScope block_pools(masm);
2179
2180 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002181 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002182 if (index.IsConstant()) {
2183 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2184 destination = HeapOperand(array, offset);
2185 } else {
2186 UseScratchRegisterScope temps(masm);
2187 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002188 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00002189 // The read barrier instrumentation does not support the
2190 // HArm64IntermediateAddress instruction yet.
2191 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002192 // We do not need to compute the intermediate address from the array: the
2193 // input instruction has done it already. See the comment in
2194 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2195 if (kIsDebugBuild) {
2196 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2197          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2198 }
2199 temp = array;
2200 } else {
2201 __ Add(temp, array, offset);
2202 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002203 destination = HeapOperand(temp,
2204 XRegisterFrom(index),
2205 LSL,
2206 Primitive::ComponentSizeShift(value_type));
2207 }
2208 codegen_->Store(value_type, value, destination);
2209 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002210 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002211 DCHECK(needs_write_barrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002212 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002213 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002214 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002215 {
2216 // We use a block to end the scratch scope before the write barrier, thus
2217 // freeing the temporary registers so they can be used in `MarkGCCard`.
2218 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002219 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002220 if (index.IsConstant()) {
2221 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002222 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002223 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002224 destination = HeapOperand(temp,
2225 XRegisterFrom(index),
2226 LSL,
2227 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002228 }
2229
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002230 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2231 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2232 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2233
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002234 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002235 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2236 codegen_->AddSlowPath(slow_path);
2237 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002238 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002239 __ Cbnz(Register(value), &non_zero);
2240 if (!index.IsConstant()) {
2241 __ Add(temp, array, offset);
2242 }
2243 __ Str(wzr, destination);
2244 codegen_->MaybeRecordImplicitNullCheck(instruction);
2245 __ B(&done);
2246 __ Bind(&non_zero);
2247 }
2248
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002249 if (kEmitCompilerReadBarrier) {
2250 // When read barriers are enabled, the type checking
2251 // instrumentation requires two read barriers:
2252 //
2253 // __ Mov(temp2, temp);
2254 // // /* HeapReference<Class> */ temp = temp->component_type_
2255 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002256 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002257 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2258 //
2259 // // /* HeapReference<Class> */ temp2 = value->klass_
2260 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002261 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002262 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2263 //
2264 // __ Cmp(temp, temp2);
2265 //
2266 // However, the second read barrier may trash `temp`, as it
2267 // is a temporary register, and as such would not be saved
2268 // along with live registers before calling the runtime (nor
2269 // restored afterwards). So in this case, we bail out and
2270 // delegate the work to the array set slow path.
2271 //
2272 // TODO: Extend the register allocator to support a new
2273 // "(locally) live temp" location so as to avoid always
2274 // going into the slow path when read barriers are enabled.
2275 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002276 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002277 Register temp2 = temps.AcquireSameSizeAs(array);
2278 // /* HeapReference<Class> */ temp = array->klass_
2279 __ Ldr(temp, HeapOperand(array, class_offset));
2280 codegen_->MaybeRecordImplicitNullCheck(instruction);
2281 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2282
2283 // /* HeapReference<Class> */ temp = temp->component_type_
2284 __ Ldr(temp, HeapOperand(temp, component_offset));
2285 // /* HeapReference<Class> */ temp2 = value->klass_
2286 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2287 // If heap poisoning is enabled, no need to unpoison `temp`
2288 // nor `temp2`, as we are comparing two poisoned references.
2289 __ Cmp(temp, temp2);
2290
2291 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002292 vixl::aarch64::Label do_put;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002293 __ B(eq, &do_put);
2294 // If heap poisoning is enabled, the `temp` reference has
2295 // not been unpoisoned yet; unpoison it now.
2296 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2297
2298 // /* HeapReference<Class> */ temp = temp->super_class_
2299 __ Ldr(temp, HeapOperand(temp, super_offset));
2300 // If heap poisoning is enabled, no need to unpoison
2301 // `temp`, as we are comparing against null below.
2302 __ Cbnz(temp, slow_path->GetEntryLabel());
2303 __ Bind(&do_put);
2304 } else {
2305 __ B(ne, slow_path->GetEntryLabel());
2306 }
2307 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002308 }
2309 }
2310
2311 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002312 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002313 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002314 __ Mov(temp2, value.W());
2315 GetAssembler()->PoisonHeapReference(temp2);
2316 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002317 }
2318
2319 if (!index.IsConstant()) {
2320 __ Add(temp, array, offset);
2321 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002322 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002323
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002324 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002325 codegen_->MaybeRecordImplicitNullCheck(instruction);
2326 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002327 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002328
2329 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2330
2331 if (done.IsLinked()) {
2332 __ Bind(&done);
2333 }
2334
2335 if (slow_path != nullptr) {
2336 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002337 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002338 }
2339}
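
// A pseudo-code sketch of the fast path emitted above for the write barrier
// case (illustration only; the real sequence works on possibly poisoned heap
// references, and with read barriers enabled it branches straight to
// ArraySetSlowPathARM64 instead of doing the inline check):
//
//   if (value == null) { array[index] = null; goto done; }  // optional fast path
//   component = array->klass_->component_type_;
//   if (component != value->klass_) {
//     if (static type of array is Object[] && component->super_class_ == null) {
//       // The component type is java.lang.Object, the store is always valid.
//     } else {
//       goto slow_path;  // Full check done out of line.
//     }
//   }
//   array[index] = value;  // Followed by MarkGCCard.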
2340
Alexandre Rames67555f72014-11-18 10:55:16 +00002341void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002342 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2343 ? LocationSummary::kCallOnSlowPath
2344 : LocationSummary::kNoCall;
2345 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002346 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002347 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002348 if (instruction->HasUses()) {
2349 locations->SetOut(Location::SameAsFirstInput());
2350 }
2351}
2352
2353void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002354 BoundsCheckSlowPathARM64* slow_path =
2355 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002356 codegen_->AddSlowPath(slow_path);
2357
2358 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2359 __ B(slow_path->GetEntryLabel(), hs);
2360}
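
// The single unsigned comparison above covers both failure modes: a negative
// index reinterpreted as unsigned is larger than any valid length, so the
// `hs` (unsigned >=) branch reaches the slow path for `index < 0` as well as
// for `index >= length`. A C-level sketch of the check (illustration only):
//
//   if (static_cast<uint32_t>(index) >= static_cast<uint32_t>(length)) {
//     // BoundsCheckSlowPathARM64: throw ArrayIndexOutOfBoundsException.
//   }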
2361
Alexandre Rames67555f72014-11-18 10:55:16 +00002362void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2363 LocationSummary* locations =
2364 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2365 locations->SetInAt(0, Location::RequiresRegister());
2366 if (check->HasUses()) {
2367 locations->SetOut(Location::SameAsFirstInput());
2368 }
2369}
2370
2371void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2372 // We assume the class is not null.
2373 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2374 check->GetLoadClass(), check, check->GetDexPc(), true);
2375 codegen_->AddSlowPath(slow_path);
2376 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2377}
2378
Roland Levillain1a653882016-03-18 18:05:57 +00002379static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2380 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2381 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2382}
2383
2384void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2385 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2386 Location rhs_loc = instruction->GetLocations()->InAt(1);
2387 if (rhs_loc.IsConstant()) {
2388 // 0.0 is the only immediate that can be encoded directly in
2389 // an FCMP instruction.
2390 //
2391 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2392 // specify that in a floating-point comparison, positive zero
2393 // and negative zero are considered equal, so we can use the
2394 // literal 0.0 for both cases here.
2395 //
2396 // Note however that some methods (Float.equals, Float.compare,
2397 // Float.compareTo, Double.equals, Double.compare,
2398 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2399 // StrictMath.min) consider 0.0 to be (strictly) greater than
2400 // -0.0. So if we ever translate calls to these methods into a
2401 // HCompare instruction, we must handle the -0.0 case with
2402 // care here.
2403 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2404 __ Fcmp(lhs_reg, 0.0);
2405 } else {
2406 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2407 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002408}
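
// Illustration of the +0.0/-0.0 remark above (not generated code): in Java,
//   (-0.0f == 0.0f)             evaluates to true,
//   Float.compare(-0.0f, 0.0f)  returns a negative value,
// so comparing against the literal 0.0 gives the same outcome whether the
// constant input was +0.0 or -0.0, while a total-order comparison such as
// Float.compare would need to distinguish the two zeroes, as warned above.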
2409
Serban Constantinescu02164b32014-11-13 14:05:07 +00002410void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002411 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002412 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2413 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002414 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002415 case Primitive::kPrimBoolean:
2416 case Primitive::kPrimByte:
2417 case Primitive::kPrimShort:
2418 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002419 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002420 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002421 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002422 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002423 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2424 break;
2425 }
2426 case Primitive::kPrimFloat:
2427 case Primitive::kPrimDouble: {
2428 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002429 locations->SetInAt(1,
2430 IsFloatingPointZeroConstant(compare->InputAt(1))
2431 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2432 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002433 locations->SetOut(Location::RequiresRegister());
2434 break;
2435 }
2436 default:
2437 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2438 }
2439}
2440
2441void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2442 Primitive::Type in_type = compare->InputAt(0)->GetType();
2443
2444 // 0 if: left == right
2445 // 1 if: left > right
2446 // -1 if: left < right
2447 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002448 case Primitive::kPrimBoolean:
2449 case Primitive::kPrimByte:
2450 case Primitive::kPrimShort:
2451 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002452 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002453 case Primitive::kPrimLong: {
2454 Register result = OutputRegister(compare);
2455 Register left = InputRegisterAt(compare, 0);
2456 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002457 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002458 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2459 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002460 break;
2461 }
2462 case Primitive::kPrimFloat:
2463 case Primitive::kPrimDouble: {
2464 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002465 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002466 __ Cset(result, ne);
2467 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002468 break;
2469 }
2470 default:
2471 LOG(FATAL) << "Unimplemented compare type " << in_type;
2472 }
2473}
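
// The integer sequence above produces the -1/0/+1 result without branches; a
// C-level equivalent (illustration only):
//
//   int result = (left != right) ? 1 : 0;   // Cset(result, ne)
//   if (left < right) result = -result;     // Cneg(result, result, lt)
//
// The floating-point case uses the same trick, with ARM64FPCondition choosing
// a condition that sends NaN operands to +1 or -1 according to the compare's
// gt/lt bias.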
2474
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002475void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002476 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002477
2478 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2479 locations->SetInAt(0, Location::RequiresFpuRegister());
2480 locations->SetInAt(1,
2481 IsFloatingPointZeroConstant(instruction->InputAt(1))
2482 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2483 : Location::RequiresFpuRegister());
2484 } else {
2485 // Integer cases.
2486 locations->SetInAt(0, Location::RequiresRegister());
2487 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2488 }
2489
David Brazdilb3e773e2016-01-26 11:28:37 +00002490 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002491 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002492 }
2493}
2494
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002495void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002496 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002497 return;
2498 }
2499
2500 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002501 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002502 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002503
Roland Levillain7f63c522015-07-13 15:54:55 +00002504 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002505 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002506 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002507 } else {
2508 // Integer cases.
2509 Register lhs = InputRegisterAt(instruction, 0);
2510 Operand rhs = InputOperandAt(instruction, 1);
2511 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002512 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002513 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002514}
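
// For a materialized condition the sequences above just leave a 0/1 value in
// `res`; e.g. for an integer HLessThan the emitted code is equivalent to
// (illustration only):
//
//   __ Cmp(lhs, rhs);
//   __ Cset(res, lt);   // res = (lhs < rhs) ? 1 : 0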
2515
2516#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2517 M(Equal) \
2518 M(NotEqual) \
2519 M(LessThan) \
2520 M(LessThanOrEqual) \
2521 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002522 M(GreaterThanOrEqual) \
2523 M(Below) \
2524 M(BelowOrEqual) \
2525 M(Above) \
2526 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002527#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002528void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2529void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002530FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002531#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002532#undef FOR_EACH_CONDITION_INSTRUCTION
2533
Zheng Xuc6667102015-05-15 16:08:45 +08002534void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2535 DCHECK(instruction->IsDiv() || instruction->IsRem());
2536
2537 LocationSummary* locations = instruction->GetLocations();
2538 Location second = locations->InAt(1);
2539 DCHECK(second.IsConstant());
2540
2541 Register out = OutputRegister(instruction);
2542 Register dividend = InputRegisterAt(instruction, 0);
2543 int64_t imm = Int64FromConstant(second.GetConstant());
2544 DCHECK(imm == 1 || imm == -1);
2545
2546 if (instruction->IsRem()) {
2547 __ Mov(out, 0);
2548 } else {
2549 if (imm == 1) {
2550 __ Mov(out, dividend);
2551 } else {
2552 __ Neg(out, dividend);
2553 }
2554 }
2555}
2556
2557void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2558 DCHECK(instruction->IsDiv() || instruction->IsRem());
2559
2560 LocationSummary* locations = instruction->GetLocations();
2561 Location second = locations->InAt(1);
2562 DCHECK(second.IsConstant());
2563
2564 Register out = OutputRegister(instruction);
2565 Register dividend = InputRegisterAt(instruction, 0);
2566 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002567 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002568 int ctz_imm = CTZ(abs_imm);
2569
2570 UseScratchRegisterScope temps(GetVIXLAssembler());
2571 Register temp = temps.AcquireSameSizeAs(out);
2572
2573 if (instruction->IsDiv()) {
2574 __ Add(temp, dividend, abs_imm - 1);
2575 __ Cmp(dividend, 0);
2576 __ Csel(out, temp, dividend, lt);
2577 if (imm > 0) {
2578 __ Asr(out, out, ctz_imm);
2579 } else {
2580 __ Neg(out, Operand(out, ASR, ctz_imm));
2581 }
2582 } else {
2583 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2584 __ Asr(temp, dividend, bits - 1);
2585 __ Lsr(temp, temp, bits - ctz_imm);
2586 __ Add(out, dividend, temp);
2587 __ And(out, out, abs_imm - 1);
2588 __ Sub(out, out, temp);
2589 }
2590}
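
// Worked example for the sequences above (illustration only): with imm = 8 we
// have ctz_imm = 3 and abs_imm - 1 = 7; take dividend = -13 (32-bit):
//
//   Div:  temp = -13 + 7 = -6; dividend < 0 so Csel picks temp;
//         Asr(-6, 3) = -1, i.e. -13 / 8 rounded towards zero.
//   Rem:  Asr(-13, 31) = -1; Lsr(-1, 29) = 7 (the bias; it is 0 for a
//         non-negative dividend); (-13 + 7) & 7 = 2; 2 - 7 = -5 = -13 % 8.
//
// For a negative divisor only the final Neg of the quotient differs.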
2591
2592void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2593 DCHECK(instruction->IsDiv() || instruction->IsRem());
2594
2595 LocationSummary* locations = instruction->GetLocations();
2596 Location second = locations->InAt(1);
2597 DCHECK(second.IsConstant());
2598
2599 Register out = OutputRegister(instruction);
2600 Register dividend = InputRegisterAt(instruction, 0);
2601 int64_t imm = Int64FromConstant(second.GetConstant());
2602
2603 Primitive::Type type = instruction->GetResultType();
2604 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2605
2606 int64_t magic;
2607 int shift;
2608 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2609
2610 UseScratchRegisterScope temps(GetVIXLAssembler());
2611 Register temp = temps.AcquireSameSizeAs(out);
2612
2613 // temp = get_high(dividend * magic)
2614 __ Mov(temp, magic);
2615 if (type == Primitive::kPrimLong) {
2616 __ Smulh(temp, dividend, temp);
2617 } else {
2618 __ Smull(temp.X(), dividend, temp);
2619 __ Lsr(temp.X(), temp.X(), 32);
2620 }
2621
2622 if (imm > 0 && magic < 0) {
2623 __ Add(temp, temp, dividend);
2624 } else if (imm < 0 && magic > 0) {
2625 __ Sub(temp, temp, dividend);
2626 }
2627
2628 if (shift != 0) {
2629 __ Asr(temp, temp, shift);
2630 }
2631
2632 if (instruction->IsDiv()) {
2633 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2634 } else {
2635 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2636 // TODO: Strength reduction for msub.
2637 Register temp_imm = temps.AcquireSameSizeAs(out);
2638 __ Mov(temp_imm, imm);
2639 __ Msub(out, temp, temp_imm, dividend);
2640 }
2641}
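
// A C-level sketch of what the 32-bit sequence above computes (illustration
// only; `magic` and `shift` come from CalculateMagicAndShiftForDivRem, in the
// style of Hacker's Delight):
//
//   int32_t temp = static_cast<int32_t>(
//       (static_cast<int64_t>(dividend) * magic) >> 32);       // Smull + Lsr
//   if (imm > 0 && magic < 0) temp += dividend;                 // Add
//   if (imm < 0 && magic > 0) temp -= dividend;                 // Sub
//   temp >>= shift;                                             // Asr
//   int32_t quotient  = temp - (temp >> 31);  // add 1 when temp is negative
//   int32_t remainder = dividend - quotient * imm;              // Msub
//
// The 64-bit case is the same except that Smulh yields the high half of the
// 128-bit product directly.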
2642
2643void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2644 DCHECK(instruction->IsDiv() || instruction->IsRem());
2645 Primitive::Type type = instruction->GetResultType();
2646  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2647
2648 LocationSummary* locations = instruction->GetLocations();
2649 Register out = OutputRegister(instruction);
2650 Location second = locations->InAt(1);
2651
2652 if (second.IsConstant()) {
2653 int64_t imm = Int64FromConstant(second.GetConstant());
2654
2655 if (imm == 0) {
2656      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2657 } else if (imm == 1 || imm == -1) {
2658 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002659 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002660 DivRemByPowerOfTwo(instruction);
2661 } else {
2662 DCHECK(imm <= -2 || imm >= 2);
2663 GenerateDivRemWithAnyConstant(instruction);
2664 }
2665 } else {
2666 Register dividend = InputRegisterAt(instruction, 0);
2667 Register divisor = InputRegisterAt(instruction, 1);
2668 if (instruction->IsDiv()) {
2669 __ Sdiv(out, dividend, divisor);
2670 } else {
2671 UseScratchRegisterScope temps(GetVIXLAssembler());
2672 Register temp = temps.AcquireSameSizeAs(out);
2673 __ Sdiv(temp, dividend, divisor);
2674 __ Msub(out, temp, divisor, dividend);
2675 }
2676 }
2677}
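
// For a non-constant divisor the remainder is recovered from the quotient via
// the usual identity (illustration only):
//
//   int64_t quotient  = dividend / divisor;             // Sdiv
//   int64_t remainder = dividend - quotient * divisor;  // Msub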
2678
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002679void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2680 LocationSummary* locations =
2681 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2682 switch (div->GetResultType()) {
2683 case Primitive::kPrimInt:
2684 case Primitive::kPrimLong:
2685 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002686 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002687 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2688 break;
2689
2690 case Primitive::kPrimFloat:
2691 case Primitive::kPrimDouble:
2692 locations->SetInAt(0, Location::RequiresFpuRegister());
2693 locations->SetInAt(1, Location::RequiresFpuRegister());
2694 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2695 break;
2696
2697 default:
2698 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2699 }
2700}
2701
2702void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2703 Primitive::Type type = div->GetResultType();
2704 switch (type) {
2705 case Primitive::kPrimInt:
2706 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002707 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002708 break;
2709
2710 case Primitive::kPrimFloat:
2711 case Primitive::kPrimDouble:
2712 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2713 break;
2714
2715 default:
2716 LOG(FATAL) << "Unexpected div type " << type;
2717 }
2718}
2719
Alexandre Rames67555f72014-11-18 10:55:16 +00002720void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002721 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2722 ? LocationSummary::kCallOnSlowPath
2723 : LocationSummary::kNoCall;
2724 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002725 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2726 if (instruction->HasUses()) {
2727 locations->SetOut(Location::SameAsFirstInput());
2728 }
2729}
2730
2731void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2732 SlowPathCodeARM64* slow_path =
2733 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2734 codegen_->AddSlowPath(slow_path);
2735 Location value = instruction->GetLocations()->InAt(0);
2736
Alexandre Rames3e69f162014-12-10 10:36:50 +00002737 Primitive::Type type = instruction->GetType();
2738
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002739 if (!Primitive::IsIntegralType(type)) {
2740 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002741 return;
2742 }
2743
Alexandre Rames67555f72014-11-18 10:55:16 +00002744 if (value.IsConstant()) {
2745 int64_t divisor = Int64ConstantFrom(value);
2746 if (divisor == 0) {
2747 __ B(slow_path->GetEntryLabel());
2748 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002749      // A division by a non-zero constant is valid. We don't need to perform
2750 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002751 }
2752 } else {
2753 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2754 }
2755}
2756
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002757void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2758 LocationSummary* locations =
2759 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2760 locations->SetOut(Location::ConstantLocation(constant));
2761}
2762
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002763void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2764 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002765 // Will be generated at use site.
2766}
2767
Alexandre Rames5319def2014-10-23 10:03:10 +01002768void LocationsBuilderARM64::VisitExit(HExit* exit) {
2769 exit->SetLocations(nullptr);
2770}
2771
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002772void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002773}
2774
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002775void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2776 LocationSummary* locations =
2777 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2778 locations->SetOut(Location::ConstantLocation(constant));
2779}
2780
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002781void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002782 // Will be generated at use site.
2783}
2784
David Brazdilfc6a86a2015-06-26 10:33:45 +00002785void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002786 DCHECK(!successor->IsExitBlock());
2787 HBasicBlock* block = got->GetBlock();
2788 HInstruction* previous = got->GetPrevious();
2789 HLoopInformation* info = block->GetLoopInformation();
2790
David Brazdil46e2a392015-03-16 17:31:52 +00002791 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002792 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2793 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2794 return;
2795 }
2796 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2797 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2798 }
2799 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002800 __ B(codegen_->GetLabelOf(successor));
2801 }
2802}
2803
David Brazdilfc6a86a2015-06-26 10:33:45 +00002804void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2805 got->SetLocations(nullptr);
2806}
2807
2808void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2809 HandleGoto(got, got->GetSuccessor());
2810}
2811
2812void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2813 try_boundary->SetLocations(nullptr);
2814}
2815
2816void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2817 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2818 if (!successor->IsExitBlock()) {
2819 HandleGoto(try_boundary, successor);
2820 }
2821}
2822
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002823void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002824 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002825 vixl::aarch64::Label* true_target,
2826 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00002827 // FP branching requires both targets to be explicit. If either of the targets
2828 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002829 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002830 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002831
David Brazdil0debae72015-11-12 18:37:00 +00002832 if (true_target == nullptr && false_target == nullptr) {
2833 // Nothing to do. The code always falls through.
2834 return;
2835 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002836 // Constant condition, statically compared against "true" (integer value 1).
2837 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002838 if (true_target != nullptr) {
2839 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002840 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002841 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002842 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002843 if (false_target != nullptr) {
2844 __ B(false_target);
2845 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002846 }
David Brazdil0debae72015-11-12 18:37:00 +00002847 return;
2848 }
2849
2850 // The following code generates these patterns:
2851 // (1) true_target == nullptr && false_target != nullptr
2852 // - opposite condition true => branch to false_target
2853 // (2) true_target != nullptr && false_target == nullptr
2854 // - condition true => branch to true_target
2855 // (3) true_target != nullptr && false_target != nullptr
2856 // - condition true => branch to true_target
2857 // - branch to false_target
2858 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002859 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002860 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002861 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002862 if (true_target == nullptr) {
2863 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2864 } else {
2865 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2866 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002867 } else {
2868 // The condition instruction has not been materialized, use its inputs as
2869 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002870 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002871
David Brazdil0debae72015-11-12 18:37:00 +00002872 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002873 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002874 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002875 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002876 IfCondition opposite_condition = condition->GetOppositeCondition();
2877 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002878 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002879 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002880 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002881 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002882 // Integer cases.
2883 Register lhs = InputRegisterAt(condition, 0);
2884 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002885
2886 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01002887 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002888 if (true_target == nullptr) {
2889 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2890 non_fallthrough_target = false_target;
2891 } else {
2892 arm64_cond = ARM64Condition(condition->GetCondition());
2893 non_fallthrough_target = true_target;
2894 }
2895
Aart Bik086d27e2016-01-20 17:02:00 -08002896 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01002897 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002898 switch (arm64_cond) {
2899 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002900 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002901 break;
2902 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002903 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002904 break;
2905 case lt:
2906 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002907 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002908 break;
2909 case ge:
2910 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002911 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002912 break;
2913 default:
2914 // Without the `static_cast` the compiler throws an error for
2915 // `-Werror=sign-promo`.
2916 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2917 }
2918 } else {
2919 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002920 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002921 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002922 }
2923 }
David Brazdil0debae72015-11-12 18:37:00 +00002924
2925 // If neither branch falls through (case 3), the conditional branch to `true_target`
2926 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2927 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002928 __ B(false_target);
2929 }
David Brazdil0debae72015-11-12 18:37:00 +00002930
2931 if (fallthrough_target.IsLinked()) {
2932 __ Bind(&fallthrough_target);
2933 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002934}
2935
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002936void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2937 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002938 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002939 locations->SetInAt(0, Location::RequiresRegister());
2940 }
2941}
2942
2943void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002944 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2945 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002946 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
2947 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
2948 true_target = nullptr;
2949 }
2950 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
2951 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
2952 false_target = nullptr;
2953 }
David Brazdil0debae72015-11-12 18:37:00 +00002954 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002955}
2956
2957void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2958 LocationSummary* locations = new (GetGraph()->GetArena())
2959 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002960 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002961 locations->SetInAt(0, Location::RequiresRegister());
2962 }
2963}
2964
2965void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002966 SlowPathCodeARM64* slow_path =
2967 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002968 GenerateTestAndBranch(deoptimize,
2969 /* condition_input_index */ 0,
2970 slow_path->GetEntryLabel(),
2971 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002972}
2973
David Brazdilc0b601b2016-02-08 14:20:45 +00002974static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2975 return condition->IsCondition() &&
2976 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2977}
2978
Alexandre Rames880f1192016-06-13 16:04:50 +01002979static inline Condition GetConditionForSelect(HCondition* condition) {
2980 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00002981 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
2982 : ARM64Condition(cond);
2983}
2984
David Brazdil74eb1b22015-12-14 11:44:01 +00002985void LocationsBuilderARM64::VisitSelect(HSelect* select) {
2986 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01002987 if (Primitive::IsFloatingPointType(select->GetType())) {
2988 locations->SetInAt(0, Location::RequiresFpuRegister());
2989 locations->SetInAt(1, Location::RequiresFpuRegister());
2990 locations->SetOut(Location::RequiresFpuRegister());
2991 } else {
2992 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
2993 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
2994 bool is_true_value_constant = cst_true_value != nullptr;
2995 bool is_false_value_constant = cst_false_value != nullptr;
2996 // Ask VIXL whether we should synthesize constants in registers.
2997 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
2998 Operand true_op = is_true_value_constant ?
2999 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3000 Operand false_op = is_false_value_constant ?
3001 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3002 bool true_value_in_register = false;
3003 bool false_value_in_register = false;
3004 MacroAssembler::GetCselSynthesisInformation(
3005 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3006 true_value_in_register |= !is_true_value_constant;
3007 false_value_in_register |= !is_false_value_constant;
3008
3009 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3010 : Location::ConstantLocation(cst_true_value));
3011 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3012 : Location::ConstantLocation(cst_false_value));
3013 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003014 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003015
David Brazdil74eb1b22015-12-14 11:44:01 +00003016 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3017 locations->SetInAt(2, Location::RequiresRegister());
3018 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003019}
3020
3021void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003022 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003023 Condition csel_cond;
3024
3025 if (IsBooleanValueOrMaterializedCondition(cond)) {
3026 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003027 // Use the condition flags set by the previous instruction.
3028 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003029 } else {
3030 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003031 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003032 }
3033 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003034 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003035 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003036 } else {
3037 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003038 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003039 }
3040
Alexandre Rames880f1192016-06-13 16:04:50 +01003041 if (Primitive::IsFloatingPointType(select->GetType())) {
3042 __ Fcsel(OutputFPRegister(select),
3043 InputFPRegisterAt(select, 1),
3044 InputFPRegisterAt(select, 0),
3045 csel_cond);
3046 } else {
3047 __ Csel(OutputRegister(select),
3048 InputOperandAt(select, 1),
3049 InputOperandAt(select, 0),
3050 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003051 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003052}
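
// The Csel/Fcsel above implements `out = condition ? true_value : false_value`
// with HSelect's operand order (input 0 = false value, input 1 = true value);
// e.g. for a materialized integer condition (illustration only):
//
//   __ Cmp(InputRegisterAt(select, 2), 0);            // condition != 0 ?
//   __ Csel(out, true_value_op, false_value_op, ne);  // pick input 1 or 0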
3053
David Srbecky0cf44932015-12-09 14:09:59 +00003054void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3055 new (GetGraph()->GetArena()) LocationSummary(info);
3056}
3057
David Srbeckyd28f4a02016-03-14 17:14:24 +00003058void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3059 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003060}
3061
3062void CodeGeneratorARM64::GenerateNop() {
3063 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003064}
3065
Alexandre Rames5319def2014-10-23 10:03:10 +01003066void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003067 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003068}
3069
3070void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003071 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003072}
3073
3074void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003075 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003076}
3077
3078void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003079 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003080}
3081
Roland Levillain44015862016-01-22 11:47:17 +00003082static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3083 return kEmitCompilerReadBarrier &&
3084 (kUseBakerReadBarrier ||
3085 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3086 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3087 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3088}
3089
Alexandre Rames67555f72014-11-18 10:55:16 +00003090void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003091 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003092 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3093 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003094 case TypeCheckKind::kExactCheck:
3095 case TypeCheckKind::kAbstractClassCheck:
3096 case TypeCheckKind::kClassHierarchyCheck:
3097 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003098 call_kind =
3099 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003100 break;
3101 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003102 case TypeCheckKind::kUnresolvedCheck:
3103 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003104 call_kind = LocationSummary::kCallOnSlowPath;
3105 break;
3106 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003107
Alexandre Rames67555f72014-11-18 10:55:16 +00003108 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003109 locations->SetInAt(0, Location::RequiresRegister());
3110 locations->SetInAt(1, Location::RequiresRegister());
3111 // The "out" register is used as a temporary, so it overlaps with the inputs.
3112 // Note that TypeCheckSlowPathARM64 uses this register too.
3113 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3114 // When read barriers are enabled, we need a temporary register for
3115 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003116 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003117 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003118 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003119}
3120
3121void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003122 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003123 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003124 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003125 Register obj = InputRegisterAt(instruction, 0);
3126 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003127 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003128 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003129 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3130 locations->GetTemp(0) :
3131 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003132 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3133 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3134 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3135 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003136
Scott Wakeling97c72b72016-06-24 16:19:36 +01003137 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003138 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003139
3140 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003141 // Avoid null check if we know `obj` is not null.
3142 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003143 __ Cbz(obj, &zero);
3144 }
3145
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003146 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003147 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003148
Roland Levillain44015862016-01-22 11:47:17 +00003149 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003150 case TypeCheckKind::kExactCheck: {
3151 __ Cmp(out, cls);
3152 __ Cset(out, eq);
3153 if (zero.IsLinked()) {
3154 __ B(&done);
3155 }
3156 break;
3157 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003158
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003159 case TypeCheckKind::kAbstractClassCheck: {
3160 // If the class is abstract, we eagerly fetch the super class of the
3161 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003162 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003163 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003164 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003165 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003166 // If `out` is null, we use it for the result, and jump to `done`.
3167 __ Cbz(out, &done);
3168 __ Cmp(out, cls);
3169 __ B(ne, &loop);
3170 __ Mov(out, 1);
3171 if (zero.IsLinked()) {
3172 __ B(&done);
3173 }
3174 break;
3175 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003176
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003177 case TypeCheckKind::kClassHierarchyCheck: {
3178 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003179 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003180 __ Bind(&loop);
3181 __ Cmp(out, cls);
3182 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003183 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003184 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003185 __ Cbnz(out, &loop);
3186 // If `out` is null, we use it for the result, and jump to `done`.
3187 __ B(&done);
3188 __ Bind(&success);
3189 __ Mov(out, 1);
3190 if (zero.IsLinked()) {
3191 __ B(&done);
3192 }
3193 break;
3194 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003195
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003196 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003197 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003198 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003199 __ Cmp(out, cls);
3200 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003201 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003202 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003203 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003204 // If `out` is null, we use it for the result, and jump to `done`.
3205 __ Cbz(out, &done);
3206 __ Ldrh(out, HeapOperand(out, primitive_offset));
3207 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3208 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003209 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003210 __ Mov(out, 1);
3211 __ B(&done);
3212 break;
3213 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003214
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003215 case TypeCheckKind::kArrayCheck: {
3216 __ Cmp(out, cls);
3217 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003218 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3219 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003220 codegen_->AddSlowPath(slow_path);
3221 __ B(ne, slow_path->GetEntryLabel());
3222 __ Mov(out, 1);
3223 if (zero.IsLinked()) {
3224 __ B(&done);
3225 }
3226 break;
3227 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003228
Calin Juravle98893e12015-10-02 21:05:03 +01003229 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003230 case TypeCheckKind::kInterfaceCheck: {
3231 // Note that we indeed only call on slow path, but we always go
3232 // into the slow path for the unresolved and interface check
3233 // cases.
3234 //
3235 // We cannot directly call the InstanceofNonTrivial runtime
3236 // entry point without resorting to a type checking slow path
3237 // here (i.e. by calling InvokeRuntime directly), as it would
3238 // require to assign fixed registers for the inputs of this
3239 // HInstanceOf instruction (following the runtime calling
3240 // convention), which might be cluttered by the potential first
3241 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003242 //
3243 // TODO: Introduce a new runtime entry point taking the object
3244 // to test (instead of its class) as argument, and let it deal
3245 // with the read barrier issues. This will let us refactor this
3246 // case of the `switch` code as it was previously (with a direct
3247 // call to the runtime not using a type checking slow path).
3248 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003249 DCHECK(locations->OnlyCallsOnSlowPath());
3250 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3251 /* is_fatal */ false);
3252 codegen_->AddSlowPath(slow_path);
3253 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003254 if (zero.IsLinked()) {
3255 __ B(&done);
3256 }
3257 break;
3258 }
3259 }
3260
3261 if (zero.IsLinked()) {
3262 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003263 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003264 }
3265
3266 if (done.IsLinked()) {
3267 __ Bind(&done);
3268 }
3269
3270 if (slow_path != nullptr) {
3271 __ Bind(slow_path->GetExitLabel());
3272 }
3273}
3274
3275void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3276 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3277 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3278
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003279 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3280 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003281 case TypeCheckKind::kExactCheck:
3282 case TypeCheckKind::kAbstractClassCheck:
3283 case TypeCheckKind::kClassHierarchyCheck:
3284 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003285 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3286 LocationSummary::kCallOnSlowPath :
3287 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003288 break;
3289 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003290 case TypeCheckKind::kUnresolvedCheck:
3291 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003292 call_kind = LocationSummary::kCallOnSlowPath;
3293 break;
3294 }
3295
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003296 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3297 locations->SetInAt(0, Location::RequiresRegister());
3298 locations->SetInAt(1, Location::RequiresRegister());
3299 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3300 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003301 // When read barriers are enabled, we need an additional temporary
3302 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003303 if (TypeCheckNeedsATemporary(type_check_kind)) {
3304 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003305 }
3306}
3307
3308void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003309 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003310 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003311 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003312 Register obj = InputRegisterAt(instruction, 0);
3313 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003314 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003315 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3316 locations->GetTemp(1) :
3317 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003318 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003319 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3320 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3321 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3322 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003323
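  // In short, for the flag computed below: the type check slow path is
  // "fatal" (it throws and never returns) only for the four simple check
  // kinds, where branching to it means the cast has definitely failed, and
  // only when the resulting exception cannot be caught inside this method.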
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003324 bool is_type_check_slow_path_fatal =
3325 (type_check_kind == TypeCheckKind::kExactCheck ||
3326 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3327 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3328 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3329 !instruction->CanThrowIntoCatchBlock();
3330 SlowPathCodeARM64* type_check_slow_path =
3331 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3332 is_type_check_slow_path_fatal);
3333 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003334
Scott Wakeling97c72b72016-06-24 16:19:36 +01003335 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003336 // Avoid null check if we know obj is not null.
3337 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003338 __ Cbz(obj, &done);
3339 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003340
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003341 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003342 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003343
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003344 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003345 case TypeCheckKind::kExactCheck:
3346 case TypeCheckKind::kArrayCheck: {
3347 __ Cmp(temp, cls);
3348 // Jump to slow path for throwing the exception or doing a
3349 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003350 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003351 break;
3352 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003353
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003354 case TypeCheckKind::kAbstractClassCheck: {
3355 // If the class is abstract, we eagerly fetch the super class of the
3356 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003357 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003358 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003359 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003360 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003361
3362 // If the class reference currently in `temp` is not null, jump
3363 // to the `compare_classes` label to compare it with the checked
3364 // class.
3365 __ Cbnz(temp, &compare_classes);
3366 // Otherwise, jump to the slow path to throw the exception.
3367 //
3368 // But before, move back the object's class into `temp` before
3369 // going into the slow path, as it has been overwritten in the
3370 // meantime.
3371 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003372 GenerateReferenceLoadTwoRegisters(
3373 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003374 __ B(type_check_slow_path->GetEntryLabel());
3375
3376 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003377 __ Cmp(temp, cls);
3378 __ B(ne, &loop);
3379 break;
3380 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003381
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003382 case TypeCheckKind::kClassHierarchyCheck: {
3383 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003384 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003385 __ Bind(&loop);
3386 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003387 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003388
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003389 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003390 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003391
3392 // If the class reference currently in `temp` is not null, jump
3393 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003394 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003395 // Otherwise, jump to the slow path to throw the exception.
3396 //
3397 // But before, move back the object's class into `temp` before
3398 // going into the slow path, as it has been overwritten in the
3399 // meantime.
3400 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003401 GenerateReferenceLoadTwoRegisters(
3402 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003403 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003404 break;
3405 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003407 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003408 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003409 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003410 __ Cmp(temp, cls);
3411 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003412
3413 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003414 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003415 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003416
3417 // If the component type is not null (i.e. the object is indeed
3418 // an array), jump to label `check_non_primitive_component_type`
3419 // to further check that this component type is not a primitive
3420 // type.
3421 __ Cbnz(temp, &check_non_primitive_component_type);
3422 // Otherwise, jump to the slow path to throw the exception.
3423 //
3424 // But before, move back the object's class into `temp` before
3425 // going into the slow path, as it has been overwritten in the
3426 // meantime.
3427 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003428 GenerateReferenceLoadTwoRegisters(
3429 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003430 __ B(type_check_slow_path->GetEntryLabel());
3431
3432 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003433 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3434 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003435 __ Cbz(temp, &done);
3436 // Same comment as above regarding `temp` and the slow path.
3437 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003438 GenerateReferenceLoadTwoRegisters(
3439 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003440 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003441 break;
3442 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003443
Calin Juravle98893e12015-10-02 21:05:03 +01003444 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003445 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003446 // We always go into the type check slow path for the unresolved
3447 // and interface check cases.
3448 //
3449 // We cannot directly call the CheckCast runtime entry point
3450 // without resorting to a type checking slow path here (i.e. by
3451 // calling InvokeRuntime directly), as it would require to
3452 // assign fixed registers for the inputs of this HInstanceOf
3453 // instruction (following the runtime calling convention), which
3454 // might be cluttered by the potential first read barrier
3455 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003456 //
3457 // TODO: Introduce a new runtime entry point taking the object
3458 // to test (instead of its class) as argument, and let it deal
3459 // with the read barrier issues. This will let us refactor this
3460 // case of the `switch` code as it was previously (with a direct
3461 // call to the runtime not using a type checking slow path).
3462 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003463 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003464 break;
3465 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003466 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003467
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003468 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003469}
3470
Alexandre Rames5319def2014-10-23 10:03:10 +01003471void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3472 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3473 locations->SetOut(Location::ConstantLocation(constant));
3474}
3475
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003476void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003477 // Will be generated at use site.
3478}
3479
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003480void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3481 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3482 locations->SetOut(Location::ConstantLocation(constant));
3483}
3484
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003485void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003486 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003487}
3488
Calin Juravle175dc732015-08-25 15:42:32 +01003489void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3490 // The trampoline uses the same calling convention as dex calling conventions,
3491 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3492 // the method_idx.
3493 HandleInvoke(invoke);
3494}
3495
3496void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3497 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3498}
3499
Alexandre Rames5319def2014-10-23 10:03:10 +01003500void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003501 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003502 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003503}
3504
Alexandre Rames67555f72014-11-18 10:55:16 +00003505void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3506 HandleInvoke(invoke);
3507}
3508
3509void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3510 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003511 LocationSummary* locations = invoke->GetLocations();
3512 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003513 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003514 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003515 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003516
3517 // The register ip1 is required to be used for the hidden argument in
3518 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003519 MacroAssembler* masm = GetVIXLAssembler();
3520 UseScratchRegisterScope scratch_scope(masm);
3521 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003522 scratch_scope.Exclude(ip1);
3523 __ Mov(ip1, invoke->GetDexMethodIndex());
3524
Alexandre Rames67555f72014-11-18 10:55:16 +00003525 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003526 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003527 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003528 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003529 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003530 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003531 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003532 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003533 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003534 // Instead of simply (possibly) unpoisoning `temp` here, we should
3535 // emit a read barrier for the previous class reference load.
3536 // However this is not required in practice, as this is an
3537 // intermediate/temporary reference and because the current
3538 // concurrent copying collector keeps the from-space memory
3539 // intact/accessible until the end of the marking phase (the
3540 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003541 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003542 __ Ldr(temp,
3543 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3544 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
3545 invoke->GetImtIndex() % ImTable::kSize, kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003546 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003547 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003548 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003549 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003550 // lr();
3551 __ Blr(lr);
3552 DCHECK(!codegen_->IsLeafMethod());
3553 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3554}
3555
3556void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003557 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3558 if (intrinsic.TryDispatch(invoke)) {
3559 return;
3560 }
3561
Alexandre Rames67555f72014-11-18 10:55:16 +00003562 HandleInvoke(invoke);
3563}
3564
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003565void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003566 // Explicit clinit checks triggered by static invokes must have been pruned by
3567 // art::PrepareForRegisterAllocation.
3568 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003569
Andreas Gampe878d58c2015-01-15 23:24:00 -08003570 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3571 if (intrinsic.TryDispatch(invoke)) {
3572 return;
3573 }
3574
Alexandre Rames67555f72014-11-18 10:55:16 +00003575 HandleInvoke(invoke);
3576}
3577
Andreas Gampe878d58c2015-01-15 23:24:00 -08003578static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3579 if (invoke->GetLocations()->Intrinsified()) {
3580 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3581 intrinsic.Dispatch(invoke);
3582 return true;
3583 }
3584 return false;
3585}
3586
Vladimir Markodc151b22015-10-15 18:02:30 +01003587HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3588 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3589 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003590 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003591 return desired_dispatch_info;
3592}
3593
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003594void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003595 // For better instruction scheduling we load the direct code pointer before the method pointer.
3596 bool direct_code_loaded = false;
3597 switch (invoke->GetCodePtrLocation()) {
3598 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3599 // LR = code address from literal pool with link-time patch.
3600 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3601 direct_code_loaded = true;
3602 break;
3603 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3604 // LR = invoke->GetDirectCodePtr();
3605 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3606 direct_code_loaded = true;
3607 break;
3608 default:
3609 break;
3610 }
3611
Andreas Gampe878d58c2015-01-15 23:24:00 -08003612 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003613 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3614 switch (invoke->GetMethodLoadKind()) {
3615 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3616 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003617 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003618 break;
3619 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003620 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003621 break;
3622 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3623 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003624 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003625 break;
3626 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3627 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003628 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003629 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3630 break;
3631 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3632 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003633 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3634 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003635 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003636 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003637 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003638 __ Bind(adrp_label);
3639 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003640 }
Vladimir Marko58155012015-08-19 12:49:41 +00003641 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003642 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003643 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003644 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003645 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003646 __ Bind(ldr_label);
3647 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003648 }
Vladimir Marko58155012015-08-19 12:49:41 +00003649 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003650 }
Vladimir Marko58155012015-08-19 12:49:41 +00003651 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003652 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003653 Register reg = XRegisterFrom(temp);
3654 Register method_reg;
3655 if (current_method.IsRegister()) {
3656 method_reg = XRegisterFrom(current_method);
3657 } else {
3658 DCHECK(invoke->GetLocations()->Intrinsified());
3659 DCHECK(!current_method.IsValid());
3660 method_reg = reg;
3661 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3662 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003663
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003664 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003665 __ Ldr(reg.X(),
3666 MemOperand(method_reg.X(),
3667 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003668 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003669 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3670 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003671 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3672 break;
3673 }
3674 }
3675
3676 switch (invoke->GetCodePtrLocation()) {
3677 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3678 __ Bl(&frame_entry_label_);
3679 break;
3680 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3681 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003682 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3683 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003684 __ Bind(label);
3685 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003686 break;
3687 }
3688 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3689 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3690 // LR prepared above for better instruction scheduling.
3691 DCHECK(direct_code_loaded);
3692 // lr()
3693 __ Blr(lr);
3694 break;
3695 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3696 // LR = callee_method->entry_point_from_quick_compiled_code_;
3697 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003698 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003699 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3700 // lr()
3701 __ Blr(lr);
3702 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003703 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003704
Andreas Gampe878d58c2015-01-15 23:24:00 -08003705 DCHECK(!IsLeafMethod());
3706}
3707
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003708void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003709 // Use the calling convention instead of the location of the receiver, as
3710 // intrinsics may have put the receiver in a different register. In the intrinsics
3711 // slow path, the arguments have been moved to the right place, so here we are
3712 // guaranteed that the receiver is the first register of the calling convention.
3713 InvokeDexCallingConvention calling_convention;
3714 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003715 Register temp = XRegisterFrom(temp_in);
3716 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3717 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3718 Offset class_offset = mirror::Object::ClassOffset();
3719 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3720
3721 BlockPoolsScope block_pools(GetVIXLAssembler());
3722
3723 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003724 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003725 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003726 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003727 // Instead of simply (possibly) unpoisoning `temp` here, we should
3728 // emit a read barrier for the previous class reference load.
// However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003729 // intermediate/temporary reference and because the current
3730 // concurrent copying collector keeps the from-space memory
3731 // intact/accessible until the end of the marking phase (the
3732 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003733 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3734 // temp = temp->GetMethodAt(method_offset);
3735 __ Ldr(temp, MemOperand(temp, method_offset));
3736 // lr = temp->GetEntryPoint();
3737 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3738 // lr();
3739 __ Blr(lr);
3740}
3741
Scott Wakeling97c72b72016-06-24 16:19:36 +01003742vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3743 const DexFile& dex_file,
3744 uint32_t string_index,
3745 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003746 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3747}
3748
Scott Wakeling97c72b72016-06-24 16:19:36 +01003749vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3750 const DexFile& dex_file,
3751 uint32_t type_index,
3752 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003753 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3754}
3755
Scott Wakeling97c72b72016-06-24 16:19:36 +01003756vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3757 const DexFile& dex_file,
3758 uint32_t element_offset,
3759 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003760 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3761}
3762
Scott Wakeling97c72b72016-06-24 16:19:36 +01003763vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3764 const DexFile& dex_file,
3765 uint32_t offset_or_index,
3766 vixl::aarch64::Label* adrp_label,
3767 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003768 // Add a patch entry and return the label.
3769 patches->emplace_back(dex_file, offset_or_index);
3770 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003771 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003772 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3773 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3774 return label;
3775}
3776
Scott Wakeling97c72b72016-06-24 16:19:36 +01003777vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003778 const DexFile& dex_file, uint32_t string_index) {
3779 return boot_image_string_patches_.GetOrCreate(
3780 StringReference(&dex_file, string_index),
3781 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3782}
3783
Scott Wakeling97c72b72016-06-24 16:19:36 +01003784vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003785 const DexFile& dex_file, uint32_t type_index) {
3786 return boot_image_type_patches_.GetOrCreate(
3787 TypeReference(&dex_file, type_index),
3788 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3789}
3790
Scott Wakeling97c72b72016-06-24 16:19:36 +01003791vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3792 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003793 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3794 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3795 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3796}
3797
Scott Wakeling97c72b72016-06-24 16:19:36 +01003798vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3799 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003800 return DeduplicateUint64Literal(address);
3801}
3802
Vladimir Marko58155012015-08-19 12:49:41 +00003803void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3804 DCHECK(linker_patches->empty());
3805 size_t size =
3806 method_patches_.size() +
3807 call_patches_.size() +
3808 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003809 pc_relative_dex_cache_patches_.size() +
3810 boot_image_string_patches_.size() +
3811 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003812 boot_image_type_patches_.size() +
3813 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003814 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003815 linker_patches->reserve(size);
3816 for (const auto& entry : method_patches_) {
3817 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003818 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3819 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003820 target_method.dex_file,
3821 target_method.dex_method_index));
3822 }
3823 for (const auto& entry : call_patches_) {
3824 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003825 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3826 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003827 target_method.dex_file,
3828 target_method.dex_method_index));
3829 }
Scott Wakeling97c72b72016-06-24 16:19:36 +01003830 for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3831 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003832 info.target_method.dex_file,
3833 info.target_method.dex_method_index));
3834 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003835 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003836 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003837 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003838 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003839 info.offset_or_index));
3840 }
3841 for (const auto& entry : boot_image_string_patches_) {
3842 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003843 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3844 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003845 target_string.dex_file,
3846 target_string.string_index));
3847 }
3848 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003849 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003850 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003851 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003852 info.offset_or_index));
3853 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003854 for (const auto& entry : boot_image_type_patches_) {
3855 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003856 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3857 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003858 target_type.dex_file,
3859 target_type.type_index));
3860 }
3861 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003862 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003863 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003864 info.pc_insn_label->GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003865 info.offset_or_index));
3866 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003867 for (const auto& entry : boot_image_address_patches_) {
3868 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003869 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3870 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003871 }
3872}
3873
Scott Wakeling97c72b72016-06-24 16:19:36 +01003874vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003875 Uint32ToLiteralMap* map) {
3876 return map->GetOrCreate(
3877 value,
3878 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3879}
3880
Scott Wakeling97c72b72016-06-24 16:19:36 +01003881vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003882 return uint64_literals_.GetOrCreate(
3883 value,
3884 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003885}
3886
Scott Wakeling97c72b72016-06-24 16:19:36 +01003887vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003888 MethodReference target_method,
3889 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003890 return map->GetOrCreate(
3891 target_method,
3892 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003893}
3894
Scott Wakeling97c72b72016-06-24 16:19:36 +01003895vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003896 MethodReference target_method) {
3897 return DeduplicateMethodLiteral(target_method, &method_patches_);
3898}
3899
Scott Wakeling97c72b72016-06-24 16:19:36 +01003900vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003901 MethodReference target_method) {
3902 return DeduplicateMethodLiteral(target_method, &call_patches_);
3903}
3904
3905
Andreas Gampe878d58c2015-01-15 23:24:00 -08003906void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003907 // Explicit clinit checks triggered by static invokes must have been pruned by
3908 // art::PrepareForRegisterAllocation.
3909 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003910
Andreas Gampe878d58c2015-01-15 23:24:00 -08003911 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3912 return;
3913 }
3914
Alexandre Ramesd921d642015-04-16 15:07:16 +01003915 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003916 LocationSummary* locations = invoke->GetLocations();
3917 codegen_->GenerateStaticOrDirectCall(
3918 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003919 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003920}
3921
3922void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003923 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3924 return;
3925 }
3926
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003927 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003928 DCHECK(!codegen_->IsLeafMethod());
3929 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3930}
3931
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003932HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3933 HLoadClass::LoadKind desired_class_load_kind) {
3934 if (kEmitCompilerReadBarrier) {
3935 switch (desired_class_load_kind) {
3936 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3937 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3938 case HLoadClass::LoadKind::kBootImageAddress:
3939 // TODO: Implement for read barrier.
3940 return HLoadClass::LoadKind::kDexCacheViaMethod;
3941 default:
3942 break;
3943 }
3944 }
3945 switch (desired_class_load_kind) {
3946 case HLoadClass::LoadKind::kReferrersClass:
3947 break;
3948 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3949 DCHECK(!GetCompilerOptions().GetCompilePic());
3950 break;
3951 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3952 DCHECK(GetCompilerOptions().GetCompilePic());
3953 break;
3954 case HLoadClass::LoadKind::kBootImageAddress:
3955 break;
3956 case HLoadClass::LoadKind::kDexCacheAddress:
3957 DCHECK(Runtime::Current()->UseJitCompilation());
3958 break;
3959 case HLoadClass::LoadKind::kDexCachePcRelative:
3960 DCHECK(!Runtime::Current()->UseJitCompilation());
3961 break;
3962 case HLoadClass::LoadKind::kDexCacheViaMethod:
3963 break;
3964 }
3965 return desired_class_load_kind;
3966}
3967
Alexandre Rames67555f72014-11-18 10:55:16 +00003968void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003969 if (cls->NeedsAccessCheck()) {
3970 InvokeRuntimeCallingConvention calling_convention;
3971 CodeGenerator::CreateLoadClassLocationSummary(
3972 cls,
3973 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01003974 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003975 /* code_generator_supports_read_barrier */ true);
3976 return;
3977 }
3978
3979 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3980 ? LocationSummary::kCallOnSlowPath
3981 : LocationSummary::kNoCall;
3982 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3983 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3984 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3985 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3986 locations->SetInAt(0, Location::RequiresRegister());
3987 }
3988 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00003989}
3990
3991void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003992 if (cls->NeedsAccessCheck()) {
3993 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3994 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3995 cls,
3996 cls->GetDexPc(),
3997 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003998 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003999 return;
4000 }
4001
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004002 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004003 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004004
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004005 bool generate_null_check = false;
4006 switch (cls->GetLoadKind()) {
4007 case HLoadClass::LoadKind::kReferrersClass: {
4008 DCHECK(!cls->CanCallRuntime());
4009 DCHECK(!cls->MustGenerateClinitCheck());
4010 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4011 Register current_method = InputRegisterAt(cls, 0);
4012 GenerateGcRootFieldLoad(
4013 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4014 break;
4015 }
4016 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4017 DCHECK(!kEmitCompilerReadBarrier);
4018 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4019 cls->GetTypeIndex()));
4020 break;
4021 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4022 DCHECK(!kEmitCompilerReadBarrier);
4023 // Add ADRP with its PC-relative type patch.
4024 const DexFile& dex_file = cls->GetDexFile();
4025 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004026 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004027 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004028 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004029 __ Bind(adrp_label);
4030 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004031 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004032 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004033 vixl::aarch64::Label* add_label =
4034 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004035 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004036 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004037 __ Bind(add_label);
4038 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004039 }
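      // Once the linker patches both instructions, the pair resolves the type
      // address roughly as follows (illustrative AArch64 sequence; the exact
      // relocation spelling is not taken from this file):
      //   adrp xN, <4KiB page of the boot image type entry>
      //   add  xN, xN, #<low 12 bits of the boot image type entry>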
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004040 break;
4041 }
4042 case HLoadClass::LoadKind::kBootImageAddress: {
4043 DCHECK(!kEmitCompilerReadBarrier);
4044 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4045 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4046 break;
4047 }
4048 case HLoadClass::LoadKind::kDexCacheAddress: {
4049 DCHECK_NE(cls->GetAddress(), 0u);
4050 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4051 // that gives a 16KiB range. To try and reduce the number of literals if we load
4052 // multiple types, simply split the dex cache address to a 16KiB aligned base
4053 // loaded from a literal and the remaining offset embedded in the load.
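      // Illustrative split for a hypothetical address, using
      // offset_bits = 12 + 2 = 14 (mask 0x3fff) as computed below:
      //   address      = 0x7123456c
      //   base_address = 0x71234000  // address & ~0x3fff, loaded from a shared literal
      //   offset       = 0x056c      // address & 0x3fff, embedded in the load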
4054 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4055 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4056 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4057 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4058 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
4059 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4060 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4061 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4062 generate_null_check = !cls->IsInDexCache();
4063 break;
4064 }
4065 case HLoadClass::LoadKind::kDexCachePcRelative: {
4066 // Add ADRP with its PC-relative DexCache access patch.
4067 const DexFile& dex_file = cls->GetDexFile();
4068 uint32_t element_offset = cls->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004069 vixl::aarch64::Label* adrp_label =
4070 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004071 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004072 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004073 __ Bind(adrp_label);
4074 __ adrp(out.X(), /* offset placeholder */ 0);
4075 }
4076 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004077 vixl::aarch64::Label* ldr_label =
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004078 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4079 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4080 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4081 generate_null_check = !cls->IsInDexCache();
4082 break;
4083 }
4084 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4085 MemberOffset resolved_types_offset =
4086 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4087 // /* GcRoot<mirror::Class>[] */ out =
4088 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4089 Register current_method = InputRegisterAt(cls, 0);
4090 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4091 // /* GcRoot<mirror::Class> */ out = out[type_index]
4092 GenerateGcRootFieldLoad(
4093 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4094 generate_null_check = !cls->IsInDexCache();
4095 break;
4096 }
4097 }
4098
4099 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4100 DCHECK(cls->CanCallRuntime());
4101 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4102 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4103 codegen_->AddSlowPath(slow_path);
4104 if (generate_null_check) {
4105 __ Cbz(out, slow_path->GetEntryLabel());
4106 }
4107 if (cls->MustGenerateClinitCheck()) {
4108 GenerateClassInitializationCheck(slow_path, out);
4109 } else {
4110 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004111 }
4112 }
4113}
4114
David Brazdilcb1c0552015-08-04 16:22:25 +01004115static MemOperand GetExceptionTlsAddress() {
4116 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
4117}
4118
Alexandre Rames67555f72014-11-18 10:55:16 +00004119void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4120 LocationSummary* locations =
4121 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4122 locations->SetOut(Location::RequiresRegister());
4123}
4124
4125void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004126 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4127}
4128
4129void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4130 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4131}
4132
4133void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4134 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004135}
4136
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004137HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4138 HLoadString::LoadKind desired_string_load_kind) {
4139 if (kEmitCompilerReadBarrier) {
4140 switch (desired_string_load_kind) {
4141 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4142 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4143 case HLoadString::LoadKind::kBootImageAddress:
4144 // TODO: Implement for read barrier.
4145 return HLoadString::LoadKind::kDexCacheViaMethod;
4146 default:
4147 break;
4148 }
4149 }
4150 switch (desired_string_load_kind) {
4151 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4152 DCHECK(!GetCompilerOptions().GetCompilePic());
4153 break;
4154 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4155 DCHECK(GetCompilerOptions().GetCompilePic());
4156 break;
4157 case HLoadString::LoadKind::kBootImageAddress:
4158 break;
4159 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004160 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004161 break;
4162 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004163 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004164 break;
4165 case HLoadString::LoadKind::kDexCacheViaMethod:
4166 break;
4167 }
4168 return desired_string_load_kind;
4169}
4170
Alexandre Rames67555f72014-11-18 10:55:16 +00004171void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004172 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004173 ? LocationSummary::kCallOnSlowPath
4174 : LocationSummary::kNoCall;
4175 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004176 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4177 locations->SetInAt(0, Location::RequiresRegister());
4178 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004179 locations->SetOut(Location::RequiresRegister());
4180}
4181
4182void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004183 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004184 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004185
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004186 switch (load->GetLoadKind()) {
4187 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4188 DCHECK(!kEmitCompilerReadBarrier);
4189 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4190 load->GetStringIndex()));
4191 return; // No dex cache slow path.
4192 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4193 DCHECK(!kEmitCompilerReadBarrier);
4194 // Add ADRP with its PC-relative String patch.
4195 const DexFile& dex_file = load->GetDexFile();
4196 uint32_t string_index = load->GetStringIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004197 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004198 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004199 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004200 __ Bind(adrp_label);
4201 __ adrp(out.X(), /* offset placeholder */ 0);
4202 }
4203 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004204 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004205 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4206 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004207 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004208 __ Bind(add_label);
4209 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4210 }
4211 return; // No dex cache slow path.
4212 }
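      // Illustration only (register and symbol names are arbitrary): once the two
      // patches recorded above are resolved by the linker, the pair assembles to
      //   adrp x0, <string root>@page          ; 4KiB page containing the root
      //   add  x0, x0, #<string root>@pageoff  ; exact address within that page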
4213 case HLoadString::LoadKind::kBootImageAddress: {
4214 DCHECK(!kEmitCompilerReadBarrier);
4215 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4216 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4217 return; // No dex cache slow path.
4218 }
4219 case HLoadString::LoadKind::kDexCacheAddress: {
4220 DCHECK_NE(load->GetAddress(), 0u);
4221       // The LDR immediate encoding has a 12-bit offset scaled by the access size, which
4222       // for 32-bit loads gives a 16KiB range. To reduce the number of literals when we
4223       // load multiple strings, simply split the dex cache address into a 16KiB-aligned
4224       // base loaded from a literal and the remaining offset embedded in the load.
4225 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4226 DCHECK_ALIGNED(load->GetAddress(), 4u);
4227 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4228 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4229 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
4230 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004231 // /* GcRoot<mirror::String> */ out = *(base_address + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004232 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4233 break;
4234 }
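      // Worked example with an arbitrary address (illustration only): with
      // offset_bits == 14, a dex cache address of 0x12345678 splits into
      //   base_address = 0x12345678 & ~0x3fff = 0x12344000  (one shared literal)
      //   offset       = 0x12345678 &  0x3fff = 0x1678      (encoded in the LDR)
      // so several roots in the same 16KiB window can reuse the same literal.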
4235 case HLoadString::LoadKind::kDexCachePcRelative: {
4236 // Add ADRP with its PC-relative DexCache access patch.
4237 const DexFile& dex_file = load->GetDexFile();
4238 uint32_t element_offset = load->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004239 vixl::aarch64::Label* adrp_label =
4240 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004241 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004242 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004243 __ Bind(adrp_label);
4244 __ adrp(out.X(), /* offset placeholder */ 0);
4245 }
4246 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004247 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004248 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004249 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004250 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4251 break;
4252 }
4253 case HLoadString::LoadKind::kDexCacheViaMethod: {
4254 Register current_method = InputRegisterAt(load, 0);
4255 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4256 GenerateGcRootFieldLoad(
4257 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4258 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4259 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4260 // /* GcRoot<mirror::String> */ out = out[string_index]
4261 GenerateGcRootFieldLoad(
4262 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4263 break;
4264 }
4265 default:
4266 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4267 UNREACHABLE();
4268 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004269
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004270 if (!load->IsInDexCache()) {
4271 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4272 codegen_->AddSlowPath(slow_path);
4273 __ Cbz(out, slow_path->GetEntryLabel());
4274 __ Bind(slow_path->GetExitLabel());
4275 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004276}
4277
Alexandre Rames5319def2014-10-23 10:03:10 +01004278void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4279 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4280 locations->SetOut(Location::ConstantLocation(constant));
4281}
4282
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004283void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004284 // Will be generated at use site.
4285}
4286
Alexandre Rames67555f72014-11-18 10:55:16 +00004287void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4288 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004289 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004290 InvokeRuntimeCallingConvention calling_convention;
4291 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4292}
4293
4294void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4295 codegen_->InvokeRuntime(instruction->IsEnter()
4296 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4297 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004298 instruction->GetDexPc(),
4299 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004300 if (instruction->IsEnter()) {
4301 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4302 } else {
4303 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4304 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004305}
4306
Alexandre Rames42d641b2014-10-27 14:00:51 +00004307void LocationsBuilderARM64::VisitMul(HMul* mul) {
4308 LocationSummary* locations =
4309 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4310 switch (mul->GetResultType()) {
4311 case Primitive::kPrimInt:
4312 case Primitive::kPrimLong:
4313 locations->SetInAt(0, Location::RequiresRegister());
4314 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004315 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004316 break;
4317
4318 case Primitive::kPrimFloat:
4319 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004320 locations->SetInAt(0, Location::RequiresFpuRegister());
4321 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004322 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004323 break;
4324
4325 default:
4326 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4327 }
4328}
4329
4330void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4331 switch (mul->GetResultType()) {
4332 case Primitive::kPrimInt:
4333 case Primitive::kPrimLong:
4334 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4335 break;
4336
4337 case Primitive::kPrimFloat:
4338 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004339 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004340 break;
4341
4342 default:
4343 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4344 }
4345}
4346
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004347void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4348 LocationSummary* locations =
4349 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4350 switch (neg->GetResultType()) {
4351 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004352 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004353 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004354 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004355 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004356
4357 case Primitive::kPrimFloat:
4358 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004359 locations->SetInAt(0, Location::RequiresFpuRegister());
4360 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004361 break;
4362
4363 default:
4364 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4365 }
4366}
4367
4368void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4369 switch (neg->GetResultType()) {
4370 case Primitive::kPrimInt:
4371 case Primitive::kPrimLong:
4372 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4373 break;
4374
4375 case Primitive::kPrimFloat:
4376 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004377 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004378 break;
4379
4380 default:
4381 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4382 }
4383}
4384
4385void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4386 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004387 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004388 InvokeRuntimeCallingConvention calling_convention;
4389 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004390 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004391 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004392 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004393}
4394
4395void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4396 LocationSummary* locations = instruction->GetLocations();
4397 InvokeRuntimeCallingConvention calling_convention;
4398 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4399 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004400 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004401  // Note: if heap poisoning is enabled, the entry point takes care
4402 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004403 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4404 instruction,
4405 instruction->GetDexPc(),
4406 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004407 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004408}
4409
Alexandre Rames5319def2014-10-23 10:03:10 +01004410void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4411 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004412 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004413 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004414 if (instruction->IsStringAlloc()) {
4415 locations->AddTemp(LocationFrom(kArtMethodRegister));
4416 } else {
4417 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4418 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4419 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004420 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4421}
4422
4423void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004424  // Note: if heap poisoning is enabled, the entry point takes care
4425 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004426 if (instruction->IsStringAlloc()) {
4427 // String is allocated through StringFactory. Call NewEmptyString entry point.
4428 Location temp = instruction->GetLocations()->GetTemp(0);
4429 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
4430 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4431 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4432 __ Blr(lr);
4433 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4434 } else {
4435 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4436 instruction,
4437 instruction->GetDexPc(),
4438 nullptr);
4439 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4440 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004441}
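// Note on the IsStringAlloc() path above: the pNewEmptyString slot in the thread's
// entrypoint table is read and treated as an ArtMethod*, its quick compiled code
// pointer is loaded into lr, and the call is made directly with Blr.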
4442
4443void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4444 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004445 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004446 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004447}
4448
4449void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004450 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004451 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004452 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004453 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004454 break;
4455
4456 default:
4457 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4458 }
4459}
4460
David Brazdil66d126e2015-04-03 16:02:44 +01004461void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4462 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4463 locations->SetInAt(0, Location::RequiresRegister());
4464 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4465}
4466
4467void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004468 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004469}
4470
Alexandre Rames5319def2014-10-23 10:03:10 +01004471void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004472 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4473 ? LocationSummary::kCallOnSlowPath
4474 : LocationSummary::kNoCall;
4475 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004476 locations->SetInAt(0, Location::RequiresRegister());
4477 if (instruction->HasUses()) {
4478 locations->SetOut(Location::SameAsFirstInput());
4479 }
4480}
4481
Calin Juravle2ae48182016-03-16 14:05:09 +00004482void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4483 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004484 return;
4485 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004486
Alexandre Ramesd921d642015-04-16 15:07:16 +01004487 BlockPoolsScope block_pools(GetVIXLAssembler());
4488 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004489 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004490 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004491}
4492
Calin Juravle2ae48182016-03-16 14:05:09 +00004493void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004494 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004495 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004496
4497 LocationSummary* locations = instruction->GetLocations();
4498 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004499
4500 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004501}
4502
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004503void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004504 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004505}
4506
Alexandre Rames67555f72014-11-18 10:55:16 +00004507void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4508 HandleBinaryOp(instruction);
4509}
4510
4511void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4512 HandleBinaryOp(instruction);
4513}
4514
Alexandre Rames3e69f162014-12-10 10:36:50 +00004515void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4516 LOG(FATAL) << "Unreachable";
4517}
4518
4519void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4520 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4521}
4522
Alexandre Rames5319def2014-10-23 10:03:10 +01004523void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4524 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4525 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4526 if (location.IsStackSlot()) {
4527 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4528 } else if (location.IsDoubleStackSlot()) {
4529 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4530 }
4531 locations->SetOut(location);
4532}
4533
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004534void InstructionCodeGeneratorARM64::VisitParameterValue(
4535 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004536 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004537}
4538
4539void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4540 LocationSummary* locations =
4541 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004542 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004543}
4544
4545void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4546 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4547 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004548}
4549
4550void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4551 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004552 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004553 locations->SetInAt(i, Location::Any());
4554 }
4555 locations->SetOut(Location::Any());
4556}
4557
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004558void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004559 LOG(FATAL) << "Unreachable";
4560}
4561
Serban Constantinescu02164b32014-11-13 14:05:07 +00004562void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004563 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004564 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004565 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4566 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004567 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4568
4569 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004570 case Primitive::kPrimInt:
4571 case Primitive::kPrimLong:
4572 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004573 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004574 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4575 break;
4576
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004577 case Primitive::kPrimFloat:
4578 case Primitive::kPrimDouble: {
4579 InvokeRuntimeCallingConvention calling_convention;
4580 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4581 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4582 locations->SetOut(calling_convention.GetReturnLocation(type));
4583
4584 break;
4585 }
4586
Serban Constantinescu02164b32014-11-13 14:05:07 +00004587 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004588 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004589 }
4590}
4591
4592void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4593 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004594
Serban Constantinescu02164b32014-11-13 14:05:07 +00004595 switch (type) {
4596 case Primitive::kPrimInt:
4597 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004598 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004599 break;
4600 }
4601
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004602 case Primitive::kPrimFloat:
4603 case Primitive::kPrimDouble: {
4604 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4605 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004606 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004607 if (type == Primitive::kPrimFloat) {
4608 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4609 } else {
4610 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4611 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004612 break;
4613 }
4614
Serban Constantinescu02164b32014-11-13 14:05:07 +00004615 default:
4616 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004617 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004618 }
4619}
4620
Calin Juravle27df7582015-04-17 19:12:31 +01004621void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4622 memory_barrier->SetLocations(nullptr);
4623}
4624
4625void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004626 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004627}
4628
Alexandre Rames5319def2014-10-23 10:03:10 +01004629void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4630 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4631 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004632 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004633}
4634
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004635void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004636 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004637}
4638
4639void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4640 instruction->SetLocations(nullptr);
4641}
4642
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004643void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004644 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004645}
4646
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004647void LocationsBuilderARM64::VisitRor(HRor* ror) {
4648 HandleBinaryOp(ror);
4649}
4650
4651void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4652 HandleBinaryOp(ror);
4653}
4654
Serban Constantinescu02164b32014-11-13 14:05:07 +00004655void LocationsBuilderARM64::VisitShl(HShl* shl) {
4656 HandleShift(shl);
4657}
4658
4659void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4660 HandleShift(shl);
4661}
4662
4663void LocationsBuilderARM64::VisitShr(HShr* shr) {
4664 HandleShift(shr);
4665}
4666
4667void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4668 HandleShift(shr);
4669}
4670
Alexandre Rames5319def2014-10-23 10:03:10 +01004671void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004672 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004673}
4674
4675void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004676 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004677}
4678
Alexandre Rames67555f72014-11-18 10:55:16 +00004679void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004680 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004681}
4682
4683void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004684 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004685}
4686
4687void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004688 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004689}
4690
Alexandre Rames67555f72014-11-18 10:55:16 +00004691void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004692 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004693}
4694
Calin Juravlee460d1d2015-09-29 04:52:17 +01004695void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4696 HUnresolvedInstanceFieldGet* instruction) {
4697 FieldAccessCallingConventionARM64 calling_convention;
4698 codegen_->CreateUnresolvedFieldLocationSummary(
4699 instruction, instruction->GetFieldType(), calling_convention);
4700}
4701
4702void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4703 HUnresolvedInstanceFieldGet* instruction) {
4704 FieldAccessCallingConventionARM64 calling_convention;
4705 codegen_->GenerateUnresolvedFieldAccess(instruction,
4706 instruction->GetFieldType(),
4707 instruction->GetFieldIndex(),
4708 instruction->GetDexPc(),
4709 calling_convention);
4710}
4711
4712void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4713 HUnresolvedInstanceFieldSet* instruction) {
4714 FieldAccessCallingConventionARM64 calling_convention;
4715 codegen_->CreateUnresolvedFieldLocationSummary(
4716 instruction, instruction->GetFieldType(), calling_convention);
4717}
4718
4719void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4720 HUnresolvedInstanceFieldSet* instruction) {
4721 FieldAccessCallingConventionARM64 calling_convention;
4722 codegen_->GenerateUnresolvedFieldAccess(instruction,
4723 instruction->GetFieldType(),
4724 instruction->GetFieldIndex(),
4725 instruction->GetDexPc(),
4726 calling_convention);
4727}
4728
4729void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4730 HUnresolvedStaticFieldGet* instruction) {
4731 FieldAccessCallingConventionARM64 calling_convention;
4732 codegen_->CreateUnresolvedFieldLocationSummary(
4733 instruction, instruction->GetFieldType(), calling_convention);
4734}
4735
4736void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4737 HUnresolvedStaticFieldGet* instruction) {
4738 FieldAccessCallingConventionARM64 calling_convention;
4739 codegen_->GenerateUnresolvedFieldAccess(instruction,
4740 instruction->GetFieldType(),
4741 instruction->GetFieldIndex(),
4742 instruction->GetDexPc(),
4743 calling_convention);
4744}
4745
4746void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4747 HUnresolvedStaticFieldSet* instruction) {
4748 FieldAccessCallingConventionARM64 calling_convention;
4749 codegen_->CreateUnresolvedFieldLocationSummary(
4750 instruction, instruction->GetFieldType(), calling_convention);
4751}
4752
4753void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4754 HUnresolvedStaticFieldSet* instruction) {
4755 FieldAccessCallingConventionARM64 calling_convention;
4756 codegen_->GenerateUnresolvedFieldAccess(instruction,
4757 instruction->GetFieldType(),
4758 instruction->GetFieldIndex(),
4759 instruction->GetDexPc(),
4760 calling_convention);
4761}
4762
Alexandre Rames5319def2014-10-23 10:03:10 +01004763void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4764 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4765}
4766
4767void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004768 HBasicBlock* block = instruction->GetBlock();
4769 if (block->GetLoopInformation() != nullptr) {
4770 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4771 // The back edge will generate the suspend check.
4772 return;
4773 }
4774 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4775 // The goto will generate the suspend check.
4776 return;
4777 }
4778 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004779}
4780
Alexandre Rames67555f72014-11-18 10:55:16 +00004781void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4782 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004783 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004784 InvokeRuntimeCallingConvention calling_convention;
4785 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4786}
4787
4788void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
4789 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004790 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004791 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004792}
4793
4794void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4795 LocationSummary* locations =
4796 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4797 Primitive::Type input_type = conversion->GetInputType();
4798 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004799 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004800 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4801 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4802 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4803 }
4804
Alexandre Rames542361f2015-01-29 16:57:31 +00004805 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004806 locations->SetInAt(0, Location::RequiresFpuRegister());
4807 } else {
4808 locations->SetInAt(0, Location::RequiresRegister());
4809 }
4810
Alexandre Rames542361f2015-01-29 16:57:31 +00004811 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004812 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4813 } else {
4814 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4815 }
4816}
4817
4818void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4819 Primitive::Type result_type = conversion->GetResultType();
4820 Primitive::Type input_type = conversion->GetInputType();
4821
4822 DCHECK_NE(input_type, result_type);
4823
Alexandre Rames542361f2015-01-29 16:57:31 +00004824 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004825 int result_size = Primitive::ComponentSize(result_type);
4826 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004827 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004828 Register output = OutputRegister(conversion);
4829 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004830 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004831 // 'int' values are used directly as W registers, discarding the top
4832 // bits, so we don't need to sign-extend and can just perform a move.
4833 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4834 // top 32 bits of the target register. We theoretically could leave those
4835 // bits unchanged, but we would have to make sure that no code uses a
4836 // 32bit input value as a 64bit value assuming that the top 32 bits are
4837 // zero.
4838 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004839 } else if (result_type == Primitive::kPrimChar ||
4840 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4841 __ Ubfx(output,
4842 output.IsX() ? source.X() : source.W(),
4843 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004844 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004845 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004846 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004847 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004848 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004849 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004850 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4851 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004852 } else if (Primitive::IsFloatingPointType(result_type) &&
4853 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004854 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4855 } else {
4856 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4857 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004858 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004859}
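// Illustrative examples of the integral conversion paths above (register
// assignments are arbitrary):
//   long -> int  : mov  w0, w1           (truncation via the W view)
//   int  -> char : ubfx w0, w1, #0, #16  (zero-extend the low 16 bits)
//   int  -> byte : sbfx w0, w1, #0, #8   (sign-extend the low 8 bits)
//   int  -> long : sbfx x0, x1, #0, #32  (sign-extend the low 32 bits)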
Alexandre Rames67555f72014-11-18 10:55:16 +00004860
Serban Constantinescu02164b32014-11-13 14:05:07 +00004861void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4862 HandleShift(ushr);
4863}
4864
4865void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4866 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004867}
4868
4869void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4870 HandleBinaryOp(instruction);
4871}
4872
4873void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4874 HandleBinaryOp(instruction);
4875}
4876
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004877void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004878 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004879 LOG(FATAL) << "Unreachable";
4880}
4881
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004882void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004883 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004884 LOG(FATAL) << "Unreachable";
4885}
4886
Mark Mendellfe57faa2015-09-18 09:26:15 -04004887// Simple implementation of packed switch - generate cascaded compare/jumps.
4888void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4889 LocationSummary* locations =
4890 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4891 locations->SetInAt(0, Location::RequiresRegister());
4892}
4893
4894void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4895 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004896 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004897 Register value_reg = InputRegisterAt(switch_instr, 0);
4898 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4899
Zheng Xu3927c8b2015-11-18 17:46:25 +08004900  // Assume roughly 16 assembly instructions are generated, on average, per HIR in the graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004901 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08004902  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
4903 // make sure we don't emit it if the target may run out of range.
4904 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4905 // ranges and emit the tables only as required.
4906   static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
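  // With 4-byte A64 instructions this works out to 1 MiB / (16 * 4 B) = 16384 HIRs,
  // past which an ADR to a table emitted at the end of the method might not reach.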
Mark Mendellfe57faa2015-09-18 09:26:15 -04004907
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004908 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004909 // Current instruction id is an upper bound of the number of HIRs in the graph.
4910 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4911 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004912 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4913 Register temp = temps.AcquireW();
4914 __ Subs(temp, value_reg, Operand(lower_bound));
4915
Zheng Xu3927c8b2015-11-18 17:46:25 +08004916 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004917 // Jump to successors[0] if value == lower_bound.
4918 __ B(eq, codegen_->GetLabelOf(successors[0]));
4919 int32_t last_index = 0;
4920 for (; num_entries - last_index > 2; last_index += 2) {
4921 __ Subs(temp, temp, Operand(2));
4922 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4923 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4924 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4925 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4926 }
4927 if (num_entries - last_index == 2) {
4928 // The last missing case_value.
4929 __ Cmp(temp, Operand(1));
4930 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004931 }
4932
4933 // And the default for any other value.
4934 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4935 __ B(codegen_->GetLabelOf(default_block));
4936 }
4937 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004938 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004939
4940 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4941
4942 // Below instructions should use at most one blocked register. Since there are two blocked
4943 // registers, we are free to block one.
4944 Register temp_w = temps.AcquireW();
4945 Register index;
4946 // Remove the bias.
4947 if (lower_bound != 0) {
4948 index = temp_w;
4949 __ Sub(index, value_reg, Operand(lower_bound));
4950 } else {
4951 index = value_reg;
4952 }
4953
4954     // Jump to the default block if the index is out of range.
4955 __ Cmp(index, Operand(num_entries));
4956 __ B(hs, codegen_->GetLabelOf(default_block));
4957
4958     // In the current VIXL implementation, encoding the immediate value for Adr does not
4959     // require any blocked registers, so we are free to use both VIXL blocked registers to
4960     // reduce register pressure.
4961 Register table_base = temps.AcquireX();
4962 // Load jump offset from the table.
4963 __ Adr(table_base, jump_table->GetTableStartLabel());
4964 Register jump_offset = temp_w;
4965 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4966
4967     // Jump to the target block by branching to table_base (PC-relative) + offset.
4968 Register target_address = table_base;
4969 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4970 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04004971 }
4972}
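// Illustrative lowering (scratch register and label names are arbitrary): for a
// three-entry switch starting at lower_bound == 10 with successors S0..S2, the
// compare/jump cascade above emits roughly
//   subs wtmp, w_value, #10
//   b.eq S0
//   subs wtmp, wtmp, #2
//   b.lo S1
//   b.eq S2
//   b    default        ; omitted when `default` is the fall-through block
// Larger switches instead index a jump table via ADR + LDR + ADD + BR.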
4973
Roland Levillain44015862016-01-22 11:47:17 +00004974void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4975 Location out,
4976 uint32_t offset,
4977 Location maybe_temp) {
4978 Primitive::Type type = Primitive::kPrimNot;
4979 Register out_reg = RegisterFrom(out, type);
4980 if (kEmitCompilerReadBarrier) {
4981 Register temp_reg = RegisterFrom(maybe_temp, type);
4982 if (kUseBakerReadBarrier) {
4983 // Load with fast path based Baker's read barrier.
4984 // /* HeapReference<Object> */ out = *(out + offset)
4985 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4986 out,
4987 out_reg,
4988 offset,
4989 temp_reg,
4990 /* needs_null_check */ false,
4991 /* use_load_acquire */ false);
4992 } else {
4993 // Load with slow path based read barrier.
4994 // Save the value of `out` into `maybe_temp` before overwriting it
4995 // in the following move operation, as we will need it for the
4996 // read barrier below.
4997 __ Mov(temp_reg, out_reg);
4998 // /* HeapReference<Object> */ out = *(out + offset)
4999 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5000 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5001 }
5002 } else {
5003 // Plain load with no read barrier.
5004 // /* HeapReference<Object> */ out = *(out + offset)
5005 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5006 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5007 }
5008}
5009
5010void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
5011 Location out,
5012 Location obj,
5013 uint32_t offset,
5014 Location maybe_temp) {
5015 Primitive::Type type = Primitive::kPrimNot;
5016 Register out_reg = RegisterFrom(out, type);
5017 Register obj_reg = RegisterFrom(obj, type);
5018 if (kEmitCompilerReadBarrier) {
5019 if (kUseBakerReadBarrier) {
5020 // Load with fast path based Baker's read barrier.
5021 Register temp_reg = RegisterFrom(maybe_temp, type);
5022 // /* HeapReference<Object> */ out = *(obj + offset)
5023 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5024 out,
5025 obj_reg,
5026 offset,
5027 temp_reg,
5028 /* needs_null_check */ false,
5029 /* use_load_acquire */ false);
5030 } else {
5031 // Load with slow path based read barrier.
5032 // /* HeapReference<Object> */ out = *(obj + offset)
5033 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5034 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5035 }
5036 } else {
5037 // Plain load with no read barrier.
5038 // /* HeapReference<Object> */ out = *(obj + offset)
5039 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5040 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5041 }
5042}
5043
5044void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
5045 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005046 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005047 uint32_t offset,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005048 vixl::aarch64::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00005049 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
5050 if (kEmitCompilerReadBarrier) {
5051 if (kUseBakerReadBarrier) {
5052 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
5053 // Baker's read barrier are used:
5054 //
5055 // root = obj.field;
5056 // if (Thread::Current()->GetIsGcMarking()) {
5057 // root = ReadBarrier::Mark(root)
5058 // }
5059
5060 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005061 if (fixup_label == nullptr) {
5062 __ Ldr(root_reg, MemOperand(obj, offset));
5063 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005064 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005065 __ Bind(fixup_label);
5066 __ ldr(root_reg, MemOperand(obj, offset));
5067 }
Roland Levillain44015862016-01-22 11:47:17 +00005068 static_assert(
5069 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5070 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5071 "have different sizes.");
5072 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5073 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5074 "have different sizes.");
5075
5076 // Slow path used to mark the GC root `root`.
5077 SlowPathCodeARM64* slow_path =
5078 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
5079 codegen_->AddSlowPath(slow_path);
5080
5081 MacroAssembler* masm = GetVIXLAssembler();
5082 UseScratchRegisterScope temps(masm);
5083 Register temp = temps.AcquireW();
5084 // temp = Thread::Current()->GetIsGcMarking()
5085 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
5086 __ Cbnz(temp, slow_path->GetEntryLabel());
5087 __ Bind(slow_path->GetExitLabel());
5088 } else {
5089 // GC root loaded through a slow path for read barriers other
5090 // than Baker's.
5091 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005092 if (fixup_label == nullptr) {
5093 __ Add(root_reg.X(), obj.X(), offset);
5094 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005095 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005096 __ Bind(fixup_label);
5097 __ add(root_reg.X(), obj.X(), offset);
5098 }
Roland Levillain44015862016-01-22 11:47:17 +00005099 // /* mirror::Object* */ root = root->Read()
5100 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5101 }
5102 } else {
5103 // Plain GC root load with no read barrier.
5104 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005105 if (fixup_label == nullptr) {
5106 __ Ldr(root_reg, MemOperand(obj, offset));
5107 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005108 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005109 __ Bind(fixup_label);
5110 __ ldr(root_reg, MemOperand(obj, offset));
5111 }
Roland Levillain44015862016-01-22 11:47:17 +00005112 // Note that GC roots are not affected by heap poisoning, thus we
5113 // do not have to unpoison `root_reg` here.
5114 }
5115}
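// Illustrative emitted sequence for the Baker read barrier case above (register
// names are arbitrary):
//   ldr  w0,   [obj, #offset]                 ; root = *(obj + offset)
//   ldr  wtmp, [tr, #is_gc_marking_offset]    ; Thread::Current()->GetIsGcMarking()
//   cbnz wtmp, <mark slow path>               ; if marking, root = Mark(root)
// The slow path re-enters at the exit label bound immediately after the cbnz.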
5116
5117void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5118 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005119 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005120 uint32_t offset,
5121 Register temp,
5122 bool needs_null_check,
5123 bool use_load_acquire) {
5124 DCHECK(kEmitCompilerReadBarrier);
5125 DCHECK(kUseBakerReadBarrier);
5126
5127 // /* HeapReference<Object> */ ref = *(obj + offset)
5128 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005129 size_t no_scale_factor = 0U;
5130 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5131 ref,
5132 obj,
5133 offset,
5134 no_index,
5135 no_scale_factor,
5136 temp,
5137 needs_null_check,
5138 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005139}
5140
5141void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5142 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005143 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005144 uint32_t data_offset,
5145 Location index,
5146 Register temp,
5147 bool needs_null_check) {
5148 DCHECK(kEmitCompilerReadBarrier);
5149 DCHECK(kUseBakerReadBarrier);
5150
5151   // Array cells are never volatile variables, so array loads
5152 // never use Load-Acquire instructions on ARM64.
5153 const bool use_load_acquire = false;
5154
Roland Levillainbfea3352016-06-23 13:48:47 +01005155 static_assert(
5156 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5157 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005158 // /* HeapReference<Object> */ ref =
5159 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005160 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5161 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5162 ref,
5163 obj,
5164 data_offset,
5165 index,
5166 scale_factor,
5167 temp,
5168 needs_null_check,
5169 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005170}
5171
5172void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5173 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005174 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005175 uint32_t offset,
5176 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005177 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005178 Register temp,
5179 bool needs_null_check,
5180 bool use_load_acquire) {
5181 DCHECK(kEmitCompilerReadBarrier);
5182 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005183 // If we are emitting an array load, we should not be using a
5184 // Load Acquire instruction. In other words:
5185 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5186 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005187
5188 MacroAssembler* masm = GetVIXLAssembler();
5189 UseScratchRegisterScope temps(masm);
5190
5191 // In slow path based read barriers, the read barrier call is
5192 // inserted after the original load. However, in fast path based
5193 // Baker's read barriers, we need to perform the load of
5194 // mirror::Object::monitor_ *before* the original reference load.
5195 // This load-load ordering is required by the read barrier.
5196 // The fast path/slow path (for Baker's algorithm) should look like:
5197 //
5198   //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering without using a memory barrier
  // (which would be more expensive).
  // obj is unchanged by this operation, but its value now depends on temp.
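  // Note: `temp` is expected to hold the result of the 32-bit monitor load
  // above, which zero-extends into `temp.X()`; the `LSR #32` operand below is
  // therefore always zero, so the Add only introduces the dependency and does
  // not change the value of `obj`.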
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
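    // No read barrier is emitted in this configuration; if heap poisoning is
    // enabled, the reference loaded into `out` still needs to be unpoisoned here.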
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Virtual dispatch: read the ArtMethod* from the class's embedded vtable.
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    // Interface dispatch: load the ImTable pointer from the class, then read
    // the ArtMethod* from the corresponding IMT slot.
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex() % ImTable::kSize, kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}


#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art