/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for small values of num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
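// Rough break-even check based on the estimates above: at num_entries == 7, the compare/jump
// sequence costs about 1.5 * 7 + 3 ~= 14 instructions (~56 bytes), while the jump table costs
// 7 instructions plus 7 int32 literals (14 words, 56 bytes), so the two strategies are roughly
// equivalent at the threshold and the table wins beyond it.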

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
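
// Example of the gt_bias handling above: for a Dex "lt" comparison with gt_bias, a NaN operand
// must make the condition read as false. An unordered FP compare on ARM64 sets NZCV to 0011,
// so "lt" (N != V) would incorrectly read as true, while "cc" (C clear) correctly reads as
// false; hence kCondLT maps to cc when gt_bias is set.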

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
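
// With the macros above, slow path code such as
//   __ Bind(GetEntryLabel());
// expands to
//   down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->Bind(GetEntryLabel());
// i.e. every `__`-prefixed mnemonic below is a call on the VIXL macro-assembler.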

// Calculate the memory operand used to save/restore the live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating point registers spill area).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
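
// The layout produced by the helper above, relative to the (possibly rebased) `base` register,
// is roughly:
//   base + spill_offset                   : live caller-save core registers (X regs).
//   base + spill_offset + core_spill_size : live caller-save FP registers (D regs).
// When the worst-case pair offset does not fit the LDP/STP immediate, `base` is advanced past
// the core spill area so that both register lists remain addressable with small offsets.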

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

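// The slow paths below all follow the same general shape, sketched here for reference:
//   __ Bind(GetEntryLabel());            // Target of the fast path's branch.
//   SaveLiveRegisters(codegen, ...);     // Skipped on paths that cannot return.
//   <move arguments into the runtime calling convention registers>
//   arm64_codegen->InvokeRuntime(...);   // Call the runtime entrypoint.
//   <move the runtime's result, if any, to its expected location>
//   RestoreLiveRegisters(codegen, ...);
//   __ B(GetExitLabel());                // Resume the fast path (omitted on fatal paths).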
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : instruction_(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  HInstruction* const instruction_;
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  HDeoptimize* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  HInstruction* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
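
// The dispatch sequence consuming this table is emitted elsewhere (in VisitPackedSwitch, not
// shown in this section); it can be expected to look roughly like:
//   Adr   table_base, <table_start>            // Address of the first literal.
//   Ldrsw offset, [table_base, index, LSL #2]  // Each entry is a self-relative int32.
//   Add   target, table_base, offset
//   Br    target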

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : instruction_(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : instruction_(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair; the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  HInstruction* const instruction_;
  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : instruction_(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
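
// As an illustration (assuming the standard managed calling convention, where x0 holds the
// ArtMethod*, x1-x7 carry core arguments and d0-d7 carry FP arguments): a signature such as
// (int, float, long) would be assigned w1, s0 and x2 by successive calls to GetNextLocation(),
// with further arguments spilling to stack slots once either register file is exhausted.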

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}
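
// For example, resolving a swap cycle such as {x0 <-> x1} (case 3 above) makes the move
// resolver request a core scratch through AllocateScratchLocationFor(Location::kRegister)
// below, which hands out one of the VIXL temps opened here (see SetupBlockedRegisters:
// ip0/ip1 are the core temps and d31 is the FP temp).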

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8] : lr.
    //   ...                : other preserved core registers.
    //   ...                : other preserved fp registers.
    //   ...                : reserved frame space.
    //   sp[0]              : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
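
// As a hypothetical example: with frame_size == 64 and only lr spilled, the entry sequence
// above would emit roughly
//   str x0, [sp, #-64]!   // kArtMethodRegister, pre-indexed: allocates the frame.
//   str lr, [sp, #56]     // frame_size - GetCoreSpillSize().
// leaving sp[0] holding the current ArtMethod*, as the runtime expects.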
1034
1035void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001036 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001037 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001038 if (!HasEmptyFrame()) {
1039 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001040 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1041 frame_size - FrameEntrySpillSize());
1042 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1043 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001044 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001045 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001046 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001047 __ Ret();
1048 GetAssembler()->cfi().RestoreState();
1049 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001050}
1051
Zheng Xuda403092015-04-24 17:35:39 +08001052vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
1053 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
1054 return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
1055 core_spill_mask_);
1056}
1057
1058vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
1059 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1060 GetNumberOfFloatingPointRegisters()));
1061 return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
1062 fpu_spill_mask_);
1063}
1064
Alexandre Rames5319def2014-10-23 10:03:10 +01001065void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1066 __ Bind(GetLabelOf(block));
1067}
1068
Calin Juravle175dc732015-08-25 15:42:32 +01001069void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1070 DCHECK(location.IsRegister());
1071 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1072}
1073
Calin Juravlee460d1d2015-09-29 04:52:17 +01001074void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1075 if (location.IsRegister()) {
1076 locations->AddTemp(location);
1077 } else {
1078 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1079 }
1080}
1081
Alexandre Rames5319def2014-10-23 10:03:10 +01001082Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
1083 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001084
Alexandre Rames5319def2014-10-23 10:03:10 +01001085 switch (type) {
1086 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001087 case Primitive::kPrimInt:
1088 case Primitive::kPrimFloat:
1089 return Location::StackSlot(GetStackSlot(load->GetLocal()));
1090
1091 case Primitive::kPrimLong:
1092 case Primitive::kPrimDouble:
1093 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
1094
Alexandre Rames5319def2014-10-23 10:03:10 +01001095 case Primitive::kPrimBoolean:
1096 case Primitive::kPrimByte:
1097 case Primitive::kPrimChar:
1098 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +01001099 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +01001100 LOG(FATAL) << "Unexpected type " << type;
1101 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001102
Alexandre Rames5319def2014-10-23 10:03:10 +01001103 LOG(FATAL) << "Unreachable";
1104 return Location::NoLocation();
1105}
1106
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001107void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001108 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001109 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001110 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +01001111 vixl::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001112 if (value_can_be_null) {
1113 __ Cbz(value, &done);
1114 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001115 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
1116 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001117 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001118 if (value_can_be_null) {
1119 __ Bind(&done);
1120 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001121}
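
// A C-like sketch of what the sequence above computes (the field name is
// illustrative, not the actual Thread layout):
//
//   uint8_t* card_table = self->card_table;                  // Ldr from tr.
//   card_table[object >> kCardShift] = (uint8_t)(uintptr_t)card_table;
//
// Only the low byte of the card-table base is stored; this works because
// the runtime biases the base so that its low byte equals the "dirty" card
// value, saving a separate constant load.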
1122
David Brazdil58282f42016-01-14 12:45:10 +00001123void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001124 // Blocked core registers:
1125 // lr : Runtime reserved.
1126 // tr : Runtime reserved.
1127 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1128 // ip1 : VIXL core temp.
1129 // ip0 : VIXL core temp.
1130 //
1131 // Blocked fp registers:
1132 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001133 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1134 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001135 while (!reserved_core_registers.IsEmpty()) {
1136 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
1137 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001138
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001139 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001140 while (!reserved_fp_registers.IsEmpty()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001141 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
1142 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001143
David Brazdil58282f42016-01-14 12:45:10 +00001144 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001145 // Stubs do not save callee-save floating point registers. If the graph
1146 // is debuggable, we need to deal with these registers differently. For
1147 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001148 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1149 while (!reserved_fp_registers_debuggable.IsEmpty()) {
1150 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001151 }
1152 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001153}
1154
Alexandre Rames3e69f162014-12-10 10:36:50 +00001155size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1156 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1157 __ Str(reg, MemOperand(sp, stack_index));
1158 return kArm64WordSize;
1159}
1160
1161size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1162 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1163 __ Ldr(reg, MemOperand(sp, stack_index));
1164 return kArm64WordSize;
1165}
1166
1167size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1168 FPRegister reg = FPRegister(reg_id, kDRegSize);
1169 __ Str(reg, MemOperand(sp, stack_index));
1170 return kArm64WordSize;
1171}
1172
1173size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1174 FPRegister reg = FPRegister(reg_id, kDRegSize);
1175 __ Ldr(reg, MemOperand(sp, stack_index));
1176 return kArm64WordSize;
1177}
1178
Alexandre Rames5319def2014-10-23 10:03:10 +01001179void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001180 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001181}
1182
1183void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001184 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001185}
1186
Alexandre Rames67555f72014-11-18 10:55:16 +00001187void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001188 if (constant->IsIntConstant()) {
1189 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1190 } else if (constant->IsLongConstant()) {
1191 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1192 } else if (constant->IsNullConstant()) {
1193 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001194 } else if (constant->IsFloatConstant()) {
1195 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1196 } else {
1197 DCHECK(constant->IsDoubleConstant());
1198 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1199 }
1200}
1201
Alexandre Rames3e69f162014-12-10 10:36:50 +00001202
1203static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1204 DCHECK(constant.IsConstant());
1205 HConstant* cst = constant.GetConstant();
1206 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001207 // Null is mapped to a core W register, which we associate with kPrimInt.
1208 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001209 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1210 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1211 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1212}
1213
Calin Juravlee460d1d2015-09-29 04:52:17 +01001214void CodeGeneratorARM64::MoveLocation(Location destination,
1215 Location source,
1216 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001217 if (source.Equals(destination)) {
1218 return;
1219 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001220
1221 // A valid move can always be inferred from the destination and source
1222 // locations. When moving from and to a register, the argument type can be
1223 // used to generate 32bit instead of 64bit moves. In debug mode we also
1224  // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001225 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001226
1227 if (destination.IsRegister() || destination.IsFpuRegister()) {
1228 if (unspecified_type) {
1229 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1230 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001231 (src_cst != nullptr && (src_cst->IsIntConstant()
1232 || src_cst->IsFloatConstant()
1233 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001234        // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001235 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001236 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001237 // If the source is a double stack slot or a 64bit constant, a 64bit
1238        // type is appropriate. Otherwise the source is a register, and since
1239        // the type has not been specified, we choose a 64bit type to force a 64bit
1240 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001241 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001242 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001243 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001244 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1245 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1246 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001247 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1248 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1249 __ Ldr(dst, StackOperandFrom(source));
1250 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001251 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001252 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001253 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001254 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001255 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001256 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001257 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001258 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1259 ? Primitive::kPrimLong
1260 : Primitive::kPrimInt;
1261 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1262 }
1263 } else {
1264 DCHECK(source.IsFpuRegister());
1265 if (destination.IsRegister()) {
1266 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1267 ? Primitive::kPrimDouble
1268 : Primitive::kPrimFloat;
1269 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1270 } else {
1271 DCHECK(destination.IsFpuRegister());
1272 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001273 }
1274 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001275 } else { // The destination is not a register. It must be a stack slot.
1276 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1277 if (source.IsRegister() || source.IsFpuRegister()) {
1278 if (unspecified_type) {
1279 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001280 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001281 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001282 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001283 }
1284 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001285 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1286 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1287 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001288 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001289 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1290 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001291 UseScratchRegisterScope temps(GetVIXLAssembler());
1292 HConstant* src_cst = source.GetConstant();
1293 CPURegister temp;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001294 if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001295 temp = temps.AcquireW();
1296 } else if (src_cst->IsLongConstant()) {
1297 temp = temps.AcquireX();
1298 } else if (src_cst->IsFloatConstant()) {
1299 temp = temps.AcquireS();
1300 } else {
1301 DCHECK(src_cst->IsDoubleConstant());
1302 temp = temps.AcquireD();
1303 }
1304 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001305 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001306 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001307 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001308 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001309 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001310 // There is generally less pressure on FP registers.
1311 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001312 __ Ldr(temp, StackOperandFrom(source));
1313 __ Str(temp, StackOperandFrom(destination));
1314 }
1315 }
1316}
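
// A purely illustrative call site (the graph pointer and stack slot are
// assumed for the example): moving a 32bit constant into a stack slot
// exercises the scratch-register path above.
//
//   codegen->MoveLocation(Location::StackSlot(16),
//                         Location::ConstantLocation(graph->GetIntConstant(42)),
//                         Primitive::kPrimInt);
//
// This acquires a W scratch register, materializes 42 in it, and emits a
// single str to [sp, #16].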
1317
1318void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001319 CPURegister dst,
1320 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001321 switch (type) {
1322 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001323 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001324 break;
1325 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001326 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001327 break;
1328 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001329 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001330 break;
1331 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001332 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001333 break;
1334 case Primitive::kPrimInt:
1335 case Primitive::kPrimNot:
1336 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001337 case Primitive::kPrimFloat:
1338 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001339 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001340 __ Ldr(dst, src);
1341 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001342 case Primitive::kPrimVoid:
1343 LOG(FATAL) << "Unreachable type " << type;
1344 }
1345}
1346
Calin Juravle77520bc2015-01-12 18:45:46 +00001347void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001348 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001349 const MemOperand& src,
1350 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001351 MacroAssembler* masm = GetVIXLAssembler();
1352 BlockPoolsScope block_pools(masm);
1353 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001354 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001355 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001356
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001357 DCHECK(!src.IsPreIndex());
1358 DCHECK(!src.IsPostIndex());
1359
1360 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001361 __ Add(temp_base, src.base(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001362 MemOperand base = MemOperand(temp_base);
1363 switch (type) {
1364 case Primitive::kPrimBoolean:
1365 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001366 if (needs_null_check) {
1367 MaybeRecordImplicitNullCheck(instruction);
1368 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001369 break;
1370 case Primitive::kPrimByte:
1371 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001372 if (needs_null_check) {
1373 MaybeRecordImplicitNullCheck(instruction);
1374 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001375 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1376 break;
1377 case Primitive::kPrimChar:
1378 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001379 if (needs_null_check) {
1380 MaybeRecordImplicitNullCheck(instruction);
1381 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001382 break;
1383 case Primitive::kPrimShort:
1384 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001385 if (needs_null_check) {
1386 MaybeRecordImplicitNullCheck(instruction);
1387 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001388 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1389 break;
1390 case Primitive::kPrimInt:
1391 case Primitive::kPrimNot:
1392 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001393 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001394 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001395 if (needs_null_check) {
1396 MaybeRecordImplicitNullCheck(instruction);
1397 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001398 break;
1399 case Primitive::kPrimFloat:
1400 case Primitive::kPrimDouble: {
1401 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001402 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001403
1404 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1405 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001406 if (needs_null_check) {
1407 MaybeRecordImplicitNullCheck(instruction);
1408 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001409 __ Fmov(FPRegister(dst), temp);
1410 break;
1411 }
1412 case Primitive::kPrimVoid:
1413 LOG(FATAL) << "Unreachable type " << type;
1414 }
1415}
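
// For a volatile int field at offset 12 the path above reduces to roughly
// (sketch; actual register choices vary):
//
//   add x16, x1, #12     // temp_base = base + offset.
//   ldar w0, [x16]       // Load-acquire, no trailing dmb required.
//
// Using ldar directly gives volatile reads the acquire ordering the Java
// memory model requires, without a separate barrier.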
1416
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001417void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001418 CPURegister src,
1419 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001420 switch (type) {
1421 case Primitive::kPrimBoolean:
1422 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001423 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001424 break;
1425 case Primitive::kPrimChar:
1426 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001427 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001428 break;
1429 case Primitive::kPrimInt:
1430 case Primitive::kPrimNot:
1431 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001432 case Primitive::kPrimFloat:
1433 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001434 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001435 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001436 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001437 case Primitive::kPrimVoid:
1438 LOG(FATAL) << "Unreachable type " << type;
1439 }
1440}
1441
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001442void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1443 CPURegister src,
1444 const MemOperand& dst) {
1445 UseScratchRegisterScope temps(GetVIXLAssembler());
1446 Register temp_base = temps.AcquireX();
1447
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001448 DCHECK(!dst.IsPreIndex());
1449 DCHECK(!dst.IsPostIndex());
1450
1451 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001452 Operand op = OperandFromMemOperand(dst);
1453 __ Add(temp_base, dst.base(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001454 MemOperand base = MemOperand(temp_base);
1455 switch (type) {
1456 case Primitive::kPrimBoolean:
1457 case Primitive::kPrimByte:
1458 __ Stlrb(Register(src), base);
1459 break;
1460 case Primitive::kPrimChar:
1461 case Primitive::kPrimShort:
1462 __ Stlrh(Register(src), base);
1463 break;
1464 case Primitive::kPrimInt:
1465 case Primitive::kPrimNot:
1466 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001467 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001468 __ Stlr(Register(src), base);
1469 break;
1470 case Primitive::kPrimFloat:
1471 case Primitive::kPrimDouble: {
1472 DCHECK(src.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001473 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001474
1475 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1476 __ Fmov(temp, FPRegister(src));
1477 __ Stlr(temp, base);
1478 break;
1479 }
1480 case Primitive::kPrimVoid:
1481 LOG(FATAL) << "Unreachable type " << type;
1482 }
1483}
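
// The release counterpart is symmetric; a volatile float store becomes
// roughly (sketch):
//
//   add x16, x1, #12     // temp_base = base + offset.
//   fmov w17, s0         // stlr has no FP form, hence the core-register hop.
//   stlr w17, [x16]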
1484
Calin Juravle175dc732015-08-25 15:42:32 +01001485void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1486 HInstruction* instruction,
1487 uint32_t dex_pc,
1488 SlowPathCode* slow_path) {
1489 InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
1490 instruction,
1491 dex_pc,
1492 slow_path);
1493}
1494
Alexandre Rames67555f72014-11-18 10:55:16 +00001495void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1496 HInstruction* instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001497 uint32_t dex_pc,
1498 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001499 ValidateInvokeRuntime(instruction, slow_path);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001500 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames67555f72014-11-18 10:55:16 +00001501 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1502 __ Blr(lr);
Roland Levillain896e32d2015-05-05 18:07:10 +01001503 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00001504}
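
// A runtime call is thus an indirect branch through the per-thread
// entrypoint table (offset shown symbolically):
//
//   ldr lr, [tr, #<entry_point_offset>]
//   blr lr
//
// RecordPcInfo runs immediately after Blr so the generated stack map
// describes the call's return address.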
1505
1506void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1507 vixl::Register class_reg) {
1508 UseScratchRegisterScope temps(GetVIXLAssembler());
1509 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001510 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1511
Serban Constantinescu02164b32014-11-13 14:05:07 +00001512 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001513 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1514 __ Add(temp, class_reg, status_offset);
1515 __ Ldar(temp, HeapOperand(temp));
1516 __ Cmp(temp, mirror::Class::kStatusInitialized);
1517 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001518 __ Bind(slow_path->GetExitLabel());
1519}
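
// Sketch of the emitted check; the status is loaded with acquire semantics
// so that fields written by the initializing thread are visible:
//
//   add w16, w<class>, #<status offset>
//   ldar w16, [x16]
//   cmp w16, #kStatusInitialized
//   b.lt <clinit slow path>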
Alexandre Rames5319def2014-10-23 10:03:10 +01001520
Roland Levillain44015862016-01-22 11:47:17 +00001521void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001522 BarrierType type = BarrierAll;
1523
1524 switch (kind) {
1525 case MemBarrierKind::kAnyAny:
1526 case MemBarrierKind::kAnyStore: {
1527 type = BarrierAll;
1528 break;
1529 }
1530 case MemBarrierKind::kLoadAny: {
1531 type = BarrierReads;
1532 break;
1533 }
1534 case MemBarrierKind::kStoreStore: {
1535 type = BarrierWrites;
1536 break;
1537 }
1538 default:
1539 LOG(FATAL) << "Unexpected memory barrier " << kind;
1540 }
1541 __ Dmb(InnerShareable, type);
1542}
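
// For reference, the expected A64 encodings (as emitted by VIXL's Dmb with
// InnerShareable; sketch):
//
//   kAnyAny / kAnyStore -> dmb ish    (BarrierAll)
//   kLoadAny            -> dmb ishld  (BarrierReads)
//   kStoreStore         -> dmb ishst  (BarrierWrites)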
1543
Serban Constantinescu02164b32014-11-13 14:05:07 +00001544void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1545 HBasicBlock* successor) {
1546 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001547 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1548 if (slow_path == nullptr) {
1549 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1550 instruction->SetSlowPath(slow_path);
1551 codegen_->AddSlowPath(slow_path);
1552 if (successor != nullptr) {
1553 DCHECK(successor->IsLoopHeader());
1554 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1555 }
1556 } else {
1557 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1558 }
1559
Serban Constantinescu02164b32014-11-13 14:05:07 +00001560 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1561 Register temp = temps.AcquireW();
1562
1563 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1564 if (successor == nullptr) {
1565 __ Cbnz(temp, slow_path->GetEntryLabel());
1566 __ Bind(slow_path->GetReturnLabel());
1567 } else {
1568 __ Cbz(temp, codegen_->GetLabelOf(successor));
1569 __ B(slow_path->GetEntryLabel());
1570 // slow_path will return to GetLabelOf(successor).
1571 }
1572}
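
// The fast path is a single flag load plus a compare-and-branch; in the
// no-successor case, roughly (sketch):
//
//   ldrh w16, [tr, #<thread flags offset>]
//   cbnz w16, <suspend slow path>   // Any set flag diverts to the runtime.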
1573
Alexandre Rames5319def2014-10-23 10:03:10 +01001574InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1575 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001576 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001577 assembler_(codegen->GetAssembler()),
1578 codegen_(codegen) {}
1579
1580#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001581 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001582
1583#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1584
1585enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001586 // Using a base helps identify when we hit such breakpoints.
1587 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001588#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1589 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1590#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1591};
1592
1593#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001594 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001595 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1596 } \
1597 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1598 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1599 locations->SetOut(Location::Any()); \
1600 }
1601 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1602#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1603
1604#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001605#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001606
Alexandre Rames67555f72014-11-18 10:55:16 +00001607void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001608 DCHECK_EQ(instr->InputCount(), 2U);
1609 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1610 Primitive::Type type = instr->GetResultType();
1611 switch (type) {
1612 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001613 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001614 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001615 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001616 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001617 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001618
1619 case Primitive::kPrimFloat:
1620 case Primitive::kPrimDouble:
1621 locations->SetInAt(0, Location::RequiresFpuRegister());
1622 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001623 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001624 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001625
Alexandre Rames5319def2014-10-23 10:03:10 +01001626 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001627 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001628 }
1629}
1630
Alexandre Rames09a99962015-04-15 11:47:56 +01001631void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001632 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1633
1634 bool object_field_get_with_read_barrier =
1635 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001636 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001637 new (GetGraph()->GetArena()) LocationSummary(instruction,
1638 object_field_get_with_read_barrier ?
1639 LocationSummary::kCallOnSlowPath :
1640 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001641 locations->SetInAt(0, Location::RequiresRegister());
1642 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1643 locations->SetOut(Location::RequiresFpuRegister());
1644 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001645 // The output overlaps for an object field get when read barriers
1646 // are enabled: we do not want the load to overwrite the object's
1647 // location, as we need it to emit the read barrier.
1648 locations->SetOut(
1649 Location::RequiresRegister(),
1650 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001651 }
1652}
1653
1654void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1655 const FieldInfo& field_info) {
1656 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001657 LocationSummary* locations = instruction->GetLocations();
1658 Location base_loc = locations->InAt(0);
1659 Location out = locations->Out();
1660 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001661 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001662 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001663 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001664
Roland Levillain44015862016-01-22 11:47:17 +00001665 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1666 // Object FieldGet with Baker's read barrier case.
1667 MacroAssembler* masm = GetVIXLAssembler();
1668 UseScratchRegisterScope temps(masm);
1669 // /* HeapReference<Object> */ out = *(base + offset)
1670 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1671 Register temp = temps.AcquireW();
1672 // Note that potential implicit null checks are handled in this
1673 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1674 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1675 instruction,
1676 out,
1677 base,
1678 offset,
1679 temp,
1680 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001681 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001682 } else {
1683 // General case.
1684 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001685 // Note that a potential implicit null check is handled in this
1686 // CodeGeneratorARM64::LoadAcquire call.
1687 // NB: LoadAcquire will record the pc info if needed.
1688 codegen_->LoadAcquire(
1689 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001690 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001691 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001692 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001693 }
Roland Levillain44015862016-01-22 11:47:17 +00001694 if (field_type == Primitive::kPrimNot) {
1695 // If read barriers are enabled, emit read barriers other than
1696 // Baker's using a slow path (and also unpoison the loaded
1697 // reference, if heap poisoning is enabled).
1698 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1699 }
Roland Levillain4d027112015-07-01 15:41:14 +01001700 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001701}
1702
1703void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1704 LocationSummary* locations =
1705 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1706 locations->SetInAt(0, Location::RequiresRegister());
1707 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1708 locations->SetInAt(1, Location::RequiresFpuRegister());
1709 } else {
1710 locations->SetInAt(1, Location::RequiresRegister());
1711 }
1712}
1713
1714void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001715 const FieldInfo& field_info,
1716 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001717 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001718 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001719
1720 Register obj = InputRegisterAt(instruction, 0);
1721 CPURegister value = InputCPURegisterAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001722 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001723 Offset offset = field_info.GetFieldOffset();
1724 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001725
Roland Levillain4d027112015-07-01 15:41:14 +01001726 {
1727 // We use a block to end the scratch scope before the write barrier, thus
1728 // freeing the temporary registers so they can be used in `MarkGCCard`.
1729 UseScratchRegisterScope temps(GetVIXLAssembler());
1730
1731 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1732 DCHECK(value.IsW());
1733 Register temp = temps.AcquireW();
1734 __ Mov(temp, value.W());
1735 GetAssembler()->PoisonHeapReference(temp.W());
1736 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001737 }
Roland Levillain4d027112015-07-01 15:41:14 +01001738
1739 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001740 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1741 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001742 } else {
1743 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1744 codegen_->MaybeRecordImplicitNullCheck(instruction);
1745 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001746 }
1747
1748 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001749 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001750 }
1751}
1752
Alexandre Rames67555f72014-11-18 10:55:16 +00001753void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001754 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001755
1756 switch (type) {
1757 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001758 case Primitive::kPrimLong: {
1759 Register dst = OutputRegister(instr);
1760 Register lhs = InputRegisterAt(instr, 0);
1761 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001762 if (instr->IsAdd()) {
1763 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001764 } else if (instr->IsAnd()) {
1765 __ And(dst, lhs, rhs);
1766 } else if (instr->IsOr()) {
1767 __ Orr(dst, lhs, rhs);
1768 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001769 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001770 } else if (instr->IsRor()) {
1771 if (rhs.IsImmediate()) {
1772 uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
1773 __ Ror(dst, lhs, shift);
1774 } else {
1775 // Ensure shift distance is in the same size register as the result. If
1776 // we are rotating a long and the shift comes in a w register originally,
1777          // we don't need to sxtw it for use as an x, since shift distances
1778          // are always taken modulo the register size, i.e. masked with (reg_bits - 1).
1779 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1780 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001781 } else {
1782 DCHECK(instr->IsXor());
1783 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001784 }
1785 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001786 }
1787 case Primitive::kPrimFloat:
1788 case Primitive::kPrimDouble: {
1789 FPRegister dst = OutputFPRegister(instr);
1790 FPRegister lhs = InputFPRegisterAt(instr, 0);
1791 FPRegister rhs = InputFPRegisterAt(instr, 1);
1792 if (instr->IsAdd()) {
1793 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001794 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001795 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001796 } else {
1797 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001798 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001799 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001800 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001801 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001802 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001803 }
1804}
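
// For example, a long rotate whose distance arrives in a W register relies
// on ror taking the shift modulo the register size, so no sxtw is needed
// (sketch):
//
//   ror x0, x1, x2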
1805
Serban Constantinescu02164b32014-11-13 14:05:07 +00001806void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1807 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1808
1809 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1810 Primitive::Type type = instr->GetResultType();
1811 switch (type) {
1812 case Primitive::kPrimInt:
1813 case Primitive::kPrimLong: {
1814 locations->SetInAt(0, Location::RequiresRegister());
1815 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1816 locations->SetOut(Location::RequiresRegister());
1817 break;
1818 }
1819 default:
1820 LOG(FATAL) << "Unexpected shift type " << type;
1821 }
1822}
1823
1824void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1825 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1826
1827 Primitive::Type type = instr->GetType();
1828 switch (type) {
1829 case Primitive::kPrimInt:
1830 case Primitive::kPrimLong: {
1831 Register dst = OutputRegister(instr);
1832 Register lhs = InputRegisterAt(instr, 0);
1833 Operand rhs = InputOperandAt(instr, 1);
1834 if (rhs.IsImmediate()) {
1835 uint32_t shift_value = (type == Primitive::kPrimInt)
1836 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1837 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1838 if (instr->IsShl()) {
1839 __ Lsl(dst, lhs, shift_value);
1840 } else if (instr->IsShr()) {
1841 __ Asr(dst, lhs, shift_value);
1842 } else {
1843 __ Lsr(dst, lhs, shift_value);
1844 }
1845 } else {
1846 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1847
1848 if (instr->IsShl()) {
1849 __ Lsl(dst, lhs, rhs_reg);
1850 } else if (instr->IsShr()) {
1851 __ Asr(dst, lhs, rhs_reg);
1852 } else {
1853 __ Lsr(dst, lhs, rhs_reg);
1854 }
1855 }
1856 break;
1857 }
1858 default:
1859 LOG(FATAL) << "Unexpected shift operation type " << type;
1860 }
1861}
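
// Java shift semantics mask the distance, which the constant path above
// applies directly: an int `x << 33` reaches codegen as an immediate and is
// emitted as (sketch):
//
//   lsl w0, w1, #1   // 33 & kMaxIntShiftValue == 1.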
1862
Alexandre Rames5319def2014-10-23 10:03:10 +01001863void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001864 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001865}
1866
1867void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001868 HandleBinaryOp(instruction);
1869}
1870
1871void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1872 HandleBinaryOp(instruction);
1873}
1874
1875void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1876 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001877}
1878
Alexandre Rames8626b742015-11-25 16:28:08 +00001879void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1880 HArm64DataProcWithShifterOp* instruction) {
1881 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1882 instruction->GetType() == Primitive::kPrimLong);
1883 LocationSummary* locations =
1884 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1885 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1886 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1887 } else {
1888 locations->SetInAt(0, Location::RequiresRegister());
1889 }
1890 locations->SetInAt(1, Location::RequiresRegister());
1891 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1892}
1893
1894void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1895 HArm64DataProcWithShifterOp* instruction) {
1896 Primitive::Type type = instruction->GetType();
1897 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1898 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1899 Register out = OutputRegister(instruction);
1900 Register left;
1901 if (kind != HInstruction::kNeg) {
1902 left = InputRegisterAt(instruction, 0);
1903 }
1904  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion
1905  // into the shifter operand, the IR producing `right_reg` (the input to that type
1906  // conversion) can have a different type from the current instruction's type,
1907  // so we specify the type manually.
1908 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
1909 int64_t shift_amount = (type == Primitive::kPrimInt)
1910 ? static_cast<uint32_t>(instruction->GetShiftAmount() & kMaxIntShiftValue)
1911 : static_cast<uint32_t>(instruction->GetShiftAmount() & kMaxLongShiftValue);
1912
1913 Operand right_operand(0);
1914
1915 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1916 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1917 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1918 } else {
1919 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1920 }
1921
1922 // Logical binary operations do not support extension operations in the
1923 // operand. Note that VIXL would still manage if it was passed by generating
1924 // the extension as a separate instruction.
1925 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1926 DCHECK(!right_operand.IsExtendedRegister() ||
1927 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1928 kind != HInstruction::kNeg));
1929 switch (kind) {
1930 case HInstruction::kAdd:
1931 __ Add(out, left, right_operand);
1932 break;
1933 case HInstruction::kAnd:
1934 __ And(out, left, right_operand);
1935 break;
1936 case HInstruction::kNeg:
1937 DCHECK(instruction->InputAt(0)->AsConstant()->IsZero());
1938 __ Neg(out, right_operand);
1939 break;
1940 case HInstruction::kOr:
1941 __ Orr(out, left, right_operand);
1942 break;
1943 case HInstruction::kSub:
1944 __ Sub(out, left, right_operand);
1945 break;
1946 case HInstruction::kXor:
1947 __ Eor(out, left, right_operand);
1948 break;
1949 default:
1950 LOG(FATAL) << "Unexpected operation kind: " << kind;
1951 UNREACHABLE();
1952 }
1953}
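
// As an illustration, an add whose right-hand side is a shift merged by the
// arm64 instruction simplifier, e.g. `a + (b << 5)`, folds into a single
// data-processing instruction (sketch):
//
//   add w0, w1, w2, lsl #5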
1954
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001955void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001956 // The read barrier instrumentation does not support the
1957 // HArm64IntermediateAddress instruction yet.
1958 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001959 LocationSummary* locations =
1960 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1961 locations->SetInAt(0, Location::RequiresRegister());
1962 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1963 locations->SetOut(Location::RequiresRegister());
1964}
1965
1966void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
1967 HArm64IntermediateAddress* instruction) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001968 // The read barrier instrumentation does not support the
1969 // HArm64IntermediateAddress instruction yet.
1970 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001971 __ Add(OutputRegister(instruction),
1972 InputRegisterAt(instruction, 0),
1973 Operand(InputOperandAt(instruction, 1)));
1974}
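
// The payoff is address reuse: several accesses to the same array can share
// one
//
//   add x16, x<array>, #<data offset>
//
// and index off the result, instead of recomputing base-plus-offset per
// access; see `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.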
1975
Nicolas Geoffray6b5afdd2016-01-22 09:31:52 +00001976void LocationsBuilderARM64::VisitArm64MultiplyAccumulate(HArm64MultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001977 LocationSummary* locations =
1978 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Nicolas Geoffray6b5afdd2016-01-22 09:31:52 +00001979 locations->SetInAt(HArm64MultiplyAccumulate::kInputAccumulatorIndex,
1980 Location::RequiresRegister());
1981 locations->SetInAt(HArm64MultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
1982 locations->SetInAt(HArm64MultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00001983 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1984}
1985
Nicolas Geoffray6b5afdd2016-01-22 09:31:52 +00001986void InstructionCodeGeneratorARM64::VisitArm64MultiplyAccumulate(HArm64MultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00001987 Register res = OutputRegister(instr);
Nicolas Geoffray6b5afdd2016-01-22 09:31:52 +00001988 Register accumulator = InputRegisterAt(instr, HArm64MultiplyAccumulate::kInputAccumulatorIndex);
1989 Register mul_left = InputRegisterAt(instr, HArm64MultiplyAccumulate::kInputMulLeftIndex);
1990 Register mul_right = InputRegisterAt(instr, HArm64MultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00001991
1992 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
1993 // This fixup should be carried out for all multiply-accumulate instructions:
1994 // madd, msub, smaddl, smsubl, umaddl and umsubl.
1995 if (instr->GetType() == Primitive::kPrimLong &&
1996 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
1997 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
1998 vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
1999 if (prev->IsLoadOrStore()) {
2000      // Make sure we emit exactly one nop.
2001 vixl::CodeBufferCheckScope scope(masm,
2002 vixl::kInstructionSize,
2003 vixl::CodeBufferCheckScope::kCheck,
2004 vixl::CodeBufferCheckScope::kExactSize);
2005 __ nop();
2006 }
2007 }
2008
2009 if (instr->GetOpKind() == HInstruction::kAdd) {
2010 __ Madd(res, mul_left, mul_right, accumulator);
2011 } else {
2012 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Nicolas Geoffray6b5afdd2016-01-22 09:31:52 +00002013 __ Msub(res, mul_left, mul_right, accumulator);
Alexandre Rames418318f2015-11-20 15:55:47 +00002014 }
2015}
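
// For example, `acc + a * b` folds to one madd and `acc - a * b` to one
// msub (sketch):
//
//   madd x0, x1, x2, x3   // x0 = x3 + x1 * x2.
//
// The conditional nop above only appears when the previous instruction is a
// load or store on cores needing the Cortex-A53 835769 fixup.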
2016
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002017void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002018 bool object_array_get_with_read_barrier =
2019 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002020 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002021 new (GetGraph()->GetArena()) LocationSummary(instruction,
2022 object_array_get_with_read_barrier ?
2023 LocationSummary::kCallOnSlowPath :
2024 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002025 locations->SetInAt(0, Location::RequiresRegister());
2026 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002027 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2028 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2029 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002030 // The output overlaps in the case of an object array get with
2031 // read barriers enabled: we do not want the move to overwrite the
2032 // array's location, as we need it to emit the read barrier.
2033 locations->SetOut(
2034 Location::RequiresRegister(),
2035 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002036 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002037}
2038
2039void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002040 Primitive::Type type = instruction->GetType();
2041 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002042 LocationSummary* locations = instruction->GetLocations();
2043 Location index = locations->InAt(1);
2044 uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Roland Levillain44015862016-01-22 11:47:17 +00002045 Location out = locations->Out();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002046
Alexandre Ramesd921d642015-04-16 15:07:16 +01002047 MacroAssembler* masm = GetVIXLAssembler();
2048 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002049 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002050 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002051
Roland Levillain44015862016-01-22 11:47:17 +00002052 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2053 // Object ArrayGet with Baker's read barrier case.
2054 Register temp = temps.AcquireW();
2055 // The read barrier instrumentation does not support the
2056 // HArm64IntermediateAddress instruction yet.
2057 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2058 // Note that a potential implicit null check is handled in the
2059 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2060 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2061 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002062 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002063 // General case.
2064 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002065 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002066 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2067 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002068 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002069 Register temp = temps.AcquireSameSizeAs(obj);
2070 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2071 // The read barrier instrumentation does not support the
2072 // HArm64IntermediateAddress instruction yet.
2073 DCHECK(!kEmitCompilerReadBarrier);
2074 // We do not need to compute the intermediate address from the array: the
2075 // input instruction has done it already. See the comment in
2076 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2077 if (kIsDebugBuild) {
2078 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2079 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2080 }
2081 temp = obj;
2082 } else {
2083 __ Add(temp, obj, offset);
2084 }
2085 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2086 }
2087
2088 codegen_->Load(type, OutputCPURegister(instruction), source);
2089 codegen_->MaybeRecordImplicitNullCheck(instruction);
2090
2091 if (type == Primitive::kPrimNot) {
2092 static_assert(
2093 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2094 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2095 Location obj_loc = locations->InAt(0);
2096 if (index.IsConstant()) {
2097 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2098 } else {
2099 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2100 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002101 }
Roland Levillain4d027112015-07-01 15:41:14 +01002102 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002103}
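
// In the general (register index, no read barrier) case the load above ends
// up as (sketch):
//
//   add x16, x<array>, #<data offset>
//   ldr w0, [x16, x<index>, lsl #<component size shift>]
//
// with the add elided when an HArm64IntermediateAddress input already
// produced the intermediate base.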
2104
Alexandre Rames5319def2014-10-23 10:03:10 +01002105void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2106 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2107 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002108 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002109}
2110
2111void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01002112 BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

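  // Two code shapes follow: a plain store when no write barrier is needed
  // (primitive component types, or a known-null reference value), and a
  // guarded store for reference values, which may additionally need a
  // runtime type check.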
  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
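    // The element is addressed either with an immediate offset (constant
    // index) or with the index register scaled by the component size. For an
    // int[] store with a register index, the sketch is roughly (offsets and
    // register names illustrative):
    //   add x_temp, x_array, #data_offset
    //   str w_value, [x_temp, x_index, lsl #2]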
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(needs_write_barrier);
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    vixl::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

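      // When the static type of the value does not guarantee assignability,
      // the fast path re-checks the type dynamically: compare the value's
      // class against the array's component type, falling back to a
      // superclass check (or the slow path) for Object[] arrays. Condensed:
      //   temp  = array->klass_->component_type_
      //   temp2 = value->klass_
      //   if (temp != temp2) -> superclass check or slow path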
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

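  // A single unsigned comparison covers both failure modes: a negative
  // (signed) index appears as a very large unsigned value, so branching on
  // `hs` (unsigned >=) catches both `index < 0` and `index >= length`.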
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* instruction) {
  return (instruction->IsFloatConstant() && (instruction->AsFloatConstant()->GetValue() == 0.0f))
      || (instruction->IsDoubleConstant() && (instruction->AsDoubleConstant()->GetValue() == 0.0));
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
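  // On the integer paths this is materialized without branches, roughly:
  //   cmp  left, right
  //   cset result, ne          // result = (left != right) ? 1 : 0
  //   cneg result, result, lt  // result = (left < right) ? -result : result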
  switch (in_type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      FPRegister left = InputFPRegisterAt(compare, 0);
      if (compare->GetLocations()->InAt(1).IsConstant()) {
        DCHECK(IsFloatingPointZeroConstant(compare->GetLocations()->InAt(1).GetConstant()));
        // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
        __ Fcmp(left, 0.0);
      } else {
        __ Fcmp(left, InputFPRegisterAt(compare, 1));
      }
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    FPRegister lhs = InputFPRegisterAt(instruction, 0);
    if (locations->InAt(1).IsConstant()) {
      DCHECK(IsFloatingPointZeroConstant(locations->InAt(1).GetConstant()));
      // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
      __ Fcmp(lhs, 0.0);
    } else {
      __ Fcmp(lhs, InputFPRegisterAt(instruction, 1));
    }
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

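  // An arithmetic shift alone rounds toward negative infinity, while Java
  // division rounds toward zero, so negative dividends need `abs_imm - 1`
  // added before the shift. E.g., a sketch of `x / 8` (not the literal
  // emitted sequence):
  //   temp = x + 7;
  //   out = (x < 0) ? temp : x;
  //   out = out >> 3;
  // The remainder path applies the analogous correction around the mask.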
  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

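  // The code below is the classic reciprocal-multiplication technique
  // (Granlund & Montgomery; see also Hacker's Delight): multiply by a
  // precomputed "magic" constant, keep the high half of the product, apply a
  // corrective add/sub and an arithmetic shift, then add the sign bit so the
  // result rounds toward zero.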
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck prevents any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
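      // AArch64 has no integer remainder instruction, so compute
      // rem = dividend - (dividend / divisor) * divisor with sdiv + msub.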
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if ((type == Primitive::kPrimBoolean) || !Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::Label* true_target,
                                                          vixl::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  vixl::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    if (cond->AsIntConstant()->IsOne()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsZero());
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      FPRegister lhs = InputFPRegisterAt(condition, 0);
      if (condition->GetLocations()->InAt(1).IsConstant()) {
        DCHECK(IsFloatingPointZeroConstant(condition->GetLocations()->InAt(1).GetConstant()));
        // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
        __ Fcmp(lhs, 0.0);
      } else {
        __ Fcmp(lhs, InputFPRegisterAt(condition, 1));
      }
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

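      // Comparisons against zero can skip the flag-setting `cmp` entirely:
      // cbz/cbnz test a whole register and tbz/tbnz test just the sign bit,
      // each folding the comparison and the branch into one instruction.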
      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.immediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

enum SelectVariant {
  kCsel,
  kCselFalseConst,
  kCselTrueConst,
  kFcsel,
};

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline bool IsRecognizedCselConstant(HInstruction* constant) {
  if (constant->IsConstant()) {
    int64_t value = Int64FromConstant(constant->AsConstant());
    if ((value == -1) || (value == 0) || (value == 1)) {
      return true;
    }
  }
  return false;
}

static inline SelectVariant GetSelectVariant(HSelect* select) {
  if (Primitive::IsFloatingPointType(select->GetType())) {
    return kFcsel;
  } else if (IsRecognizedCselConstant(select->GetFalseValue())) {
    return kCselFalseConst;
  } else if (IsRecognizedCselConstant(select->GetTrueValue())) {
    return kCselTrueConst;
  } else {
    return kCsel;
  }
}

static inline bool HasSwappedInputs(SelectVariant variant) {
  return variant == kCselTrueConst;
}

static inline Condition GetConditionForSelect(HCondition* condition, SelectVariant variant) {
  IfCondition cond = HasSwappedInputs(variant) ? condition->GetOppositeCondition()
                                               : condition->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  switch (GetSelectVariant(select)) {
    case kCsel:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselFalseConst:
      locations->SetInAt(0, Location::ConstantLocation(select->InputAt(0)->AsConstant()));
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselTrueConst:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::ConstantLocation(select->InputAt(1)->AsConstant()));
      locations->SetOut(Location::RequiresRegister());
      break;
    case kFcsel:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister());
      break;
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  SelectVariant variant = GetSelectVariant(select);
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Condition codes set from previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = HasSwappedInputs(variant) ? eq : ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    Location rhs = cond->GetLocations()->InAt(1);
    if (rhs.IsConstant()) {
      DCHECK(IsFloatingPointZeroConstant(rhs.GetConstant()));
      __ Fcmp(InputFPRegisterAt(cond, 0), 0.0);
    } else {
      __ Fcmp(InputFPRegisterAt(cond, 0), InputFPRegisterAt(cond, 1));
    }
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  }

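  // `csel`/`fcsel` pick one of two sources based on the condition flags, so
  // the select never branches. For the constant variants, the recognized
  // values (-1, 0, 1) are presumably lowered by the VIXL macro-assembler to
  // flag-conditional forms such as csel against the zero register, csinc, or
  // csinv, rather than being materialized into a register first.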
3032 switch (variant) {
3033 case kCsel:
3034 case kCselFalseConst:
3035 __ Csel(OutputRegister(select),
3036 InputRegisterAt(select, 1),
3037 InputOperandAt(select, 0),
3038 csel_cond);
3039 break;
3040 case kCselTrueConst:
3041 __ Csel(OutputRegister(select),
3042 InputRegisterAt(select, 0),
3043 InputOperandAt(select, 1),
3044 csel_cond);
3045 break;
3046 case kFcsel:
3047 __ Fcsel(OutputFPRegister(select),
3048 InputFPRegisterAt(select, 1),
3049 InputFPRegisterAt(select, 0),
3050 csel_cond);
3051 break;
3052 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003053}
3054
David Srbecky0cf44932015-12-09 14:09:59 +00003055void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3056 new (GetGraph()->GetArena()) LocationSummary(info);
3057}
3058
3059void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
David Srbeckyb7070a22016-01-08 18:13:53 +00003060 if (codegen_->HasStackMapAtCurrentPc()) {
3061 // Ensure that we do not collide with the stack map of the previous instruction.
3062 __ Nop();
3063 }
David Srbecky0cf44932015-12-09 14:09:59 +00003064 codegen_->RecordPcInfo(info, info->GetDexPc());
3065}
3066
Alexandre Rames5319def2014-10-23 10:03:10 +01003067void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003068 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003069}
3070
3071void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003072 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003073}
3074
3075void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003076 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003077}
3078
3079void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003080 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003081}
3082
Roland Levillain44015862016-01-22 11:47:17 +00003083static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3084 return kEmitCompilerReadBarrier &&
3085 (kUseBakerReadBarrier ||
3086 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3087 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3088 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3089}
3090
Alexandre Rames67555f72014-11-18 10:55:16 +00003091void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003092 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003093 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3094 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003095 case TypeCheckKind::kExactCheck:
3096 case TypeCheckKind::kAbstractClassCheck:
3097 case TypeCheckKind::kClassHierarchyCheck:
3098 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003099 call_kind =
3100 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003101 break;
3102 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003103 case TypeCheckKind::kUnresolvedCheck:
3104 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003105 call_kind = LocationSummary::kCallOnSlowPath;
3106 break;
3107 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003108
Alexandre Rames67555f72014-11-18 10:55:16 +00003109 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003110 locations->SetInAt(0, Location::RequiresRegister());
3111 locations->SetInAt(1, Location::RequiresRegister());
3112 // The "out" register is used as a temporary, so it overlaps with the inputs.
3113 // Note that TypeCheckSlowPathARM64 uses this register too.
3114 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3115 // When read barriers are enabled, we need a temporary register for
3116 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003117 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003118 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003119 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003120}
3121
3122void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003123 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003124 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003125 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003126 Register obj = InputRegisterAt(instruction, 0);
3127 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003128 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003129 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003130 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3131 locations->GetTemp(0) :
3132 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003133 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3134 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3135 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3136 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003137
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003138 vixl::Label done, zero;
3139 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003140
3141 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003142 // Avoid null check if we know `obj` is not null.
3143 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003144 __ Cbz(obj, &zero);
3145 }
3146
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003147 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003148 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003149
Roland Levillain44015862016-01-22 11:47:17 +00003150 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003151 case TypeCheckKind::kExactCheck: {
3152 __ Cmp(out, cls);
3153 __ Cset(out, eq);
3154 if (zero.IsLinked()) {
3155 __ B(&done);
3156 }
3157 break;
3158 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003159
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003160 case TypeCheckKind::kAbstractClassCheck: {
3161 // If the class is abstract, we eagerly fetch the super class of the
3162 // object to avoid doing a comparison we know will fail.
3163 vixl::Label loop, success;
3164 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003165 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003166 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003167 // If `out` is null, we use it for the result, and jump to `done`.
3168 __ Cbz(out, &done);
3169 __ Cmp(out, cls);
3170 __ B(ne, &loop);
3171 __ Mov(out, 1);
3172 if (zero.IsLinked()) {
3173 __ B(&done);
3174 }
3175 break;
3176 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003177
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003178 case TypeCheckKind::kClassHierarchyCheck: {
3179 // Walk over the class hierarchy to find a match.
3180 vixl::Label loop, success;
3181 __ Bind(&loop);
3182 __ Cmp(out, cls);
3183 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003184 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003185 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003186 __ Cbnz(out, &loop);
3187 // If `out` is null, we use it for the result, and jump to `done`.
3188 __ B(&done);
3189 __ Bind(&success);
3190 __ Mov(out, 1);
3191 if (zero.IsLinked()) {
3192 __ B(&done);
3193 }
3194 break;
3195 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003196
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003197 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003198 // Do an exact check.
3199 vixl::Label exact_check;
3200 __ Cmp(out, cls);
3201 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003202 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003203 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003204 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003205 // If `out` is null, we use it for the result, and jump to `done`.
3206 __ Cbz(out, &done);
3207 __ Ldrh(out, HeapOperand(out, primitive_offset));
3208 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3209 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003210 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003211 __ Mov(out, 1);
3212 __ B(&done);
3213 break;
3214 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003215
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003216 case TypeCheckKind::kArrayCheck: {
3217 __ Cmp(out, cls);
3218 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003219 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3220 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003221 codegen_->AddSlowPath(slow_path);
3222 __ B(ne, slow_path->GetEntryLabel());
3223 __ Mov(out, 1);
3224 if (zero.IsLinked()) {
3225 __ B(&done);
3226 }
3227 break;
3228 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003229
Calin Juravle98893e12015-10-02 21:05:03 +01003230 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003231 case TypeCheckKind::kInterfaceCheck: {
3232 // Note that we indeed only call on slow path, but we always go
3233 // into the slow path for the unresolved and interface check
3234 // cases.
3235 //
3236 // We cannot directly call the InstanceofNonTrivial runtime
3237 // entry point without resorting to a type checking slow path
3238 // here (i.e. by calling InvokeRuntime directly), as it would
3239 // require to assign fixed registers for the inputs of this
3240 // HInstanceOf instruction (following the runtime calling
3241 // convention), which might be cluttered by the potential first
3242 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003243 //
3244 // TODO: Introduce a new runtime entry point taking the object
3245 // to test (instead of its class) as argument, and let it deal
3246 // with the read barrier issues. This will let us refactor this
3247 // case of the `switch` code as it was previously (with a direct
3248 // call to the runtime not using a type checking slow path).
3249 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003250 DCHECK(locations->OnlyCallsOnSlowPath());
3251 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3252 /* is_fatal */ false);
3253 codegen_->AddSlowPath(slow_path);
3254 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003255 if (zero.IsLinked()) {
3256 __ B(&done);
3257 }
3258 break;
3259 }
3260 }
3261
3262 if (zero.IsLinked()) {
3263 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003264 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003265 }
3266
3267 if (done.IsLinked()) {
3268 __ Bind(&done);
3269 }
3270
3271 if (slow_path != nullptr) {
3272 __ Bind(slow_path->GetExitLabel());
3273 }
3274}
3275
void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::Label done;
  // Avoid the null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // /* HeapReference<Class> */ temp = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ Cbnz(temp, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label check_non_primitive_component_type;
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ Cbnz(temp, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbz(temp, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers to the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

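// For reference, the kExactCheck case above boils down to roughly this
// sequence (illustrative registers; poisoning and read barriers elided):
//   cbz   w_obj, done                     // null passes any checkcast
//   ldr   w_temp, [x_obj, #class_offset]  // temp = obj->klass_
//   cmp   w_temp, w_cls
//   b.ne  TypeCheckSlowPathARM64_entry    // throw (or re-check) at runtime
//   done:
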
void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
      invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

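// The interface dispatch above therefore emits, in rough outline
// (register names are illustrative, not the actual allocation):
//   mov  ip1, #dex_method_index                // hidden arg for the conflict trampoline
//   ldr  w_temp, [x_receiver, #class_offset]   // receiver's klass_
//   ldr  x_temp, [x_temp, #imt_entry_offset]   // ArtMethod* from the embedded IMT
//   ldr  lr, [x_temp, #entry_point_offset]
//   blr  lr
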
void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      vixl::Label* pc_insn_label = &pc_relative_dex_cache_patches_.back().label;
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(pc_insn_label);
        __ adrp(XRegisterFrom(temp), 0);
      }
      pc_relative_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      // Add LDR with its PC-relative DexCache access patch.
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(&pc_relative_dex_cache_patches_.back().label);
        __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), 0));
        pc_relative_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      }
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
      // temp = temp[index_in_cache];
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::Label* label = &relative_call_patches_.back().label;
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(label);
      __ bl(0);  // Branch and link to itself. This will be overridden at link time.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}

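// The kDexCachePcRelative case above emits an ADRP/LDR pair whose
// immediates are zero placeholders; the linker later rewrites both using
// the two recorded patches, so the final code looks roughly like:
//   adrp x_temp, <page containing the dex cache array element>
//   ldr  x_temp, [x_temp, #<element offset within that page>]
// Each instruction is wrapped in a SingleEmissionCheckScope so that no
// literal pool or veneer can be emitted between the bound label and the
// instruction to be patched.
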
void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}

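// Virtual dispatch thus costs three dependent loads plus the indirect
// call, roughly (illustrative registers):
//   ldr w_temp, [x_receiver, #class_offset]     // klass_
//   ldr x_temp, [x_temp, #vtable_entry_offset]  // ArtMethod* from the embedded vtable
//   ldr lr, [x_temp, #entry_point_offset]
//   blr lr
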
void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->location(),
                                                              info.element_offset));
  }
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  // Look up the literal for value.
  auto lb = uint64_literals_.lower_bound(value);
  if (lb != uint64_literals_.end() && !uint64_literals_.key_comp()(value, lb->first)) {
    return lb->second;
  }
  // We don't have a literal for this value, insert a new one.
  vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(value);
  uint64_literals_.PutBefore(lb, value, literal);
  return literal;
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
    MethodReference target_method,
    MethodToLiteralMap* map) {
  // Look up the literal for target_method.
  auto lb = map->lower_bound(target_method);
  if (lb != map->end() && !map->key_comp()(target_method, lb->first)) {
    return lb->second;
  }
  // We don't have a literal for this method yet, insert a new one.
  vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(0u);
  map->PutBefore(lb, target_method, literal);
  return literal;
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}

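// The helpers above share a single-lookup map idiom: lower_bound() finds
// the insertion point, an equality test via key_comp() detects a hit, and
// PutBefore() reuses the iterator as an insertion hint. A minimal
// stand-alone sketch of the same idiom with std::map (Dedup is a
// hypothetical name, not part of this file):
//
//   uint64_t* Dedup(std::map<uint64_t, uint64_t*>* map, uint64_t value) {
//     auto lb = map->lower_bound(value);
//     if (lb != map->end() && !map->key_comp()(value, lb->first)) {
//       return lb->second;                      // Hit: no second tree walk.
//     }
//     uint64_t* literal = new uint64_t(value);
//     map->emplace_hint(lb, value, literal);    // Amortized O(1) with a valid hint.
//     return literal;
//   }
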
void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  InvokeRuntimeCallingConvention calling_convention;
  CodeGenerator::CreateLoadClassLocationSummary(
      cls,
      LocationFrom(calling_convention.GetRegisterAt(0)),
      LocationFrom(vixl::x0),
      /* code_generator_supports_read_barrier */ true);
}

void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
                            cls,
                            cls->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = cls->GetLocations()->Out();
  Register out = OutputRegister(cls);
  Register current_method = InputRegisterAt(cls, 0);
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
    GenerateGcRootFieldLoad(
        cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
    // /* GcRoot<mirror::Class>[] */ out =
    //     current_method.ptr_sized_fields_->dex_cache_resolved_types_
    __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
    // /* GcRoot<mirror::Class> */ out = out[type_index]
    GenerateGcRootFieldLoad(
        cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
      DCHECK(cls->CanCallRuntime());
      SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
          cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
      codegen_->AddSlowPath(slow_path);
      if (!cls->IsInDexCache()) {
        __ Cbz(out, slow_path->GetEntryLabel());
      }
      if (cls->MustGenerateClinitCheck()) {
        GenerateClassInitializationCheck(slow_path, out);
      } else {
        __ Bind(slow_path->GetExitLabel());
      }
    }
  }
}

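// In the non-referrer case above, resolving a class is a two-step chain
// of GC-root loads, roughly (read barriers elided, offsets illustrative):
//   ldr x_out, [x_current_method, #resolved_types_offset]  // GcRoot<Class>[]
//   ldr w_out, [x_out, #type_index * sizeof(GcRoot<Class>)]  // out = cache[type_index]
//   cbz w_out, LoadClassSlowPathARM64_entry                // not yet resolved
// with the class-initialization check appended when the class may still
// need its clinit run.
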
static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

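// Both visitors above lean on the pending exception being a fixed-offset
// field of Thread, addressed off the reserved thread register, so each
// operation is a single instruction, roughly:
//   ldr w_out, [tr, #exception_offset]   // HLoadException
//   str wzr,   [tr, #exception_offset]   // HClearException (storing wzr writes zero)
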
void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Location out_loc = load->GetLocations()->Out();
  Register out = OutputRegister(load);
  Register current_method = InputRegisterAt(load, 0);

  // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
  GenerateGcRootFieldLoad(
      load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
  __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
  // /* GcRoot<mirror::String> */ out = out[string_index]
  GenerateGcRootFieldLoad(
      load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));

  if (!load->IsInDexCache()) {
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(out, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}

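// String resolution above is a three-load chain through GC roots,
// roughly (read barriers elided, offsets illustrative):
//   ldr w_out, [x_current_method, #declaring_class_offset]   // declaring class
//   ldr x_out, [x_out, #dex_cache_strings_offset]            // GcRoot<String>[]
//   ldr w_out, [x_out, #string_index * sizeof(GcRoot<String>)]  // out = cache[index]
// followed by a cbz into LoadStringSlowPathARM64 when the string may not
// be in the dex cache yet.
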
void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
                              ? QUICK_ENTRY_POINT(pLockObject)
                              : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

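// The IsStringAlloc() path above never goes through InvokeRuntime: it
// loads the StringFactory method straight from a Thread entry point slot
// and calls its quick-compiled code, roughly (illustrative register):
//   ldr x_temp, [tr, #pNewEmptyString_offset]  // ArtMethod* for NewEmptyString
//   ldr lr, [x_temp, #code_offset]             // its entry point
//   blr lr
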
void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}

void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}

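// The two strategies above trade an explicit compare for a fault:
//   implicit: ldr wzr, [x_obj]   // faults on null; the runtime's fault
//                                // handler turns the SIGSEGV into an NPE
//   explicit: cbz x_obj, NullCheckSlowPathARM64_entry
// (illustrative registers; the implicit form relies on the pc info
// recorded above to map the faulting PC back to this HNullCheck).
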
void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

Serban Constantinescu02164b32014-11-13 14:05:07 +00004252void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004253 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004254 LocationSummary::CallKind call_kind =
4255 Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004256 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4257
4258 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004259 case Primitive::kPrimInt:
4260 case Primitive::kPrimLong:
4261 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004262 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004263 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4264 break;
4265
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004266 case Primitive::kPrimFloat:
4267 case Primitive::kPrimDouble: {
4268 InvokeRuntimeCallingConvention calling_convention;
4269 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4270 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4271 locations->SetOut(calling_convention.GetReturnLocation(type));
4272
4273 break;
4274 }
4275
Serban Constantinescu02164b32014-11-13 14:05:07 +00004276 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004277 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004278 }
4279}
4280
4281void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4282 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004283
Serban Constantinescu02164b32014-11-13 14:05:07 +00004284 switch (type) {
4285 case Primitive::kPrimInt:
4286 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004287 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004288 break;
4289 }
4290
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004291 case Primitive::kPrimFloat:
4292 case Primitive::kPrimDouble: {
4293 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4294 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004295 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004296 if (type == Primitive::kPrimFloat) {
4297 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4298 } else {
4299 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4300 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004301 break;
4302 }
4303
Serban Constantinescu02164b32014-11-13 14:05:07 +00004304 default:
4305 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004306 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004307 }
4308}
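
// Note: for the float/double cases above the remainder is not computed
// inline. A sketch of the resulting call, assuming the usual AAPCS64-style
// FP argument registers (the exact registers actually come from
// InvokeRuntimeCallingConvention and ARM64ReturnLocation): the two inputs
// land in s0/s1 (d0/d1 for double), the fmodf/fmod entry point is invoked,
// and the result is returned in the first FP register.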

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
      UNREACHABLE();
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument, which forces the
      // top 32 bits of the target register to be cleared. We could in theory
      // leave those bits unchanged, but we would have to make sure that no
      // code uses a 32-bit input value as a 64-bit value assuming that the
      // top 32 bits are zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
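
// A sketch of the instructions the conversions above map to (register names
// are illustrative; the actual registers are chosen by the register
// allocator):
//   long  -> int   : mov    w0, w1           // truncate via a W-register move
//   byte  -> int   : sbfx   w0, w1, #0, #8   // sign-extend the low 8 bits
//   int   -> char  : ubfx   w0, w1, #0, #16  // zero-extend the low 16 bits
//   int   -> float : scvtf  s0, w1
//   float -> int   : fcvtzs w0, s0           // rounds toward zero
//   float -> double: fcvt   d0, s0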

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Use 16 as a rough upper bound on the average number of instructions
  // generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of
  // HIRs in the graph to make sure we don't emit it if the target may run out
  // of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could
  // keep track of ADR ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
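  // A back-of-the-envelope check of the threshold above (assuming the
  // constants keep their current values): an A64 instruction is 4 bytes, so
  // one HIR is expected to expand to at most 16 * 4 = 64 bytes, and
  // 1 MB / 64 B = 16384 HIRs keeps the expected code size within the
  // +/- 1 MB reach of ADR.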

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = new (GetGraph()->GetArena()) JumpTableARM64(switch_instr);
    codegen_->AddJumpTable(jump_table);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since
    // there are two blocked registers, we are free to block one here.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr
    // does not require any blocked registers, so we are free to use both VIXL
    // blocked registers to reduce register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}
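
// A sketch of the code emitted by VisitPackedSwitch above (register names are
// illustrative; the real ones are scratch registers picked by VIXL). For the
// compare/jump strategy with lower_bound == 10:
//   subs w16, w_value, #10
//   b.eq case_10
//   subs w16, w16, #2
//   b.lo case_11
//   b.eq case_12
//   ...
//   b    default                  // unless the default block is the next block
// For the jump-table strategy:
//   sub  w16, w_value, #10        // remove the bias (skipped when it is 0)
//   cmp  w16, #num_entries
//   b.hs default
//   adr  x17, jump_table
//   ldr  w16, [x17, w16, uxtw #2] // load the 32-bit offset from the table
//   add  x17, x17, w16, sxtw
//   br   x17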

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            vixl::Register obj,
                                                            uint32_t offset) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      __ Ldr(root_reg, MemOperand(obj, offset));
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Add(root_reg.X(), obj.X(), offset);
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ Ldr(root_reg, MemOperand(obj, offset));
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
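
// A sketch of the Baker fast path emitted above (register names are
// illustrative; `tr` is the ARM64 thread register, and the marking-flag
// offset name is hypothetical):
//   ldr  w_root, [x_obj, #offset]             // root = obj.field
//   ldr  w_temp, [tr, #is_gc_marking_offset]  // temp = GetIsGcMarking()
//   cbnz w_temp, mark_slow_path               // root = Mark(root) if marking
//   exit: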

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, offset, no_index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile, so array loads never use Load-Acquire
  // instructions on ARM64.
  const bool use_load_acquire = false;

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, data_offset, index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   vixl::Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If `index` is a valid location, then we are emitting an array
  // load, so we shouldn't be using a Load-Acquire instruction.
  // In other words: `index.IsValid()` => `!use_load_acquire`.
  DCHECK(!index.IsValid() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");
  // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
  __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
  __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
  static_assert(
      LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
      "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");

  // Introduce a dependency on the high bits of rb_state, which shall be all
  // zeroes, to prevent load-load reordering without using a memory barrier
  // (which would be more expensive).
  // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
  Register temp2 = temps.AcquireW();
  __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
  // obj is unchanged by this operation, but its value now depends on
  // temp2, which depends on temp.
  __ Add(obj, obj, Operand(temp2));
  temps.Release(temp2);
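
  // A clarifying note on the Bic/Add above, not a behavior change: the Bic
  // result is always zero, so the Add leaves `obj` unchanged, but it creates
  // an address dependency from the monitor_ load to the reference load below.
  // ARMv8 honors address dependencies, so the two loads cannot be reordered
  // even though no dmb/acquire barrier is emitted.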

  // The actual reference load.
  if (index.IsValid()) {
    static_assert(
        sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
        "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
    // /* HeapReference<Object> */ ref =
    //     *(obj + offset + index * sizeof(HeapReference<Object>))
    const size_t shift_amount = Primitive::ComponentSizeShift(type);
    if (index.IsConstant()) {
      uint32_t computed_offset = offset + (Int64ConstantFrom(index) << shift_amount);
      Load(type, ref_reg, HeapOperand(obj, computed_offset));
    } else {
      temp2 = temps.AcquireW();
      __ Add(temp2, obj, offset);
      Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, shift_amount));
      temps.Release(temp2);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  __ Cmp(temp, ReadBarrier::gray_ptr_);
  __ B(eq, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
  } else {
    method_offset = mirror::Class::EmbeddedImTableEntryOffset(
        instruction->GetIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  }
  __ Ldr(XRegisterFrom(locations->Out()),
         MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art