/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
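// At the threshold itself the two forms cost roughly the same:
// 1.5 * 7 + 3 ≈ 14 instructions versus 7 instructions + 7 32-bit literals,
// i.e. about 56 bytes of code/data either way.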

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in Table C1-1 of the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory operands used to save/restore the live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
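    // (For 64-bit accesses the LDP/STP signed immediate is a 7-bit multiple of 8,
    // i.e. offsets in [-512, 504], so a large frame can exceed it.)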
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check.)
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

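// Each jump table entry emitted below is a signed 32-bit offset from the start of the
// table to the target basic block; the packed-switch dispatch sequence (emitted
// elsewhere in this file) materializes the table address, loads the entry for the
// selected case, and adds the two before branching.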
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // room in the underlying code buffer and that the jump table is emitted with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path), we
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and the intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair; the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 can never be part of a dependency cycle on ARM64. For cases 3, 4 and 5,
  // VIXL uses at most 1 GPR per move. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR temp available to resolve
  // the dependency.
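  // Example of such a cycle: a swap (x0 -> [sp, #16], [sp, #16] -> x0) pairs cases 4 and 5
  // and is broken by staging one side in a scratch register.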
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
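    // The load below probes the address `GetStackOverflowReservedBytes(kArm64)` bytes
    // below SP; if the method cannot be given that much stack, the probe faults in the
    // guard page and the fault handler turns it into a StackOverflowError (the PC of
    // the probe is recorded for that purpose).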
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

Alexandre Rames5319def2014-10-23 10:03:10 +01001068Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
1069 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001070
Alexandre Rames5319def2014-10-23 10:03:10 +01001071 switch (type) {
1072 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001073 case Primitive::kPrimInt:
1074 case Primitive::kPrimFloat:
1075 return Location::StackSlot(GetStackSlot(load->GetLocal()));
1076
1077 case Primitive::kPrimLong:
1078 case Primitive::kPrimDouble:
1079 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
1080
Alexandre Rames5319def2014-10-23 10:03:10 +01001081 case Primitive::kPrimBoolean:
1082 case Primitive::kPrimByte:
1083 case Primitive::kPrimChar:
1084 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +01001085 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +01001086 LOG(FATAL) << "Unexpected type " << type;
1087 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001088
Alexandre Rames5319def2014-10-23 10:03:10 +01001089 LOG(FATAL) << "Unreachable";
1090 return Location::NoLocation();
1091}
1092
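// Marks the card covering `object` in the GC card table when a reference
// `value` is stored into it. The sequence below computes, in effect:
//   card_base = *(tr + card_table_offset)        // Biased card table base.
//   card_base[object >> kCardShift] = <low byte of card_base>
// The byte stored is the low byte of the biased base itself, which the card
// table arranges to equal the "dirty card" value, saving an extra constant.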
void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

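// The four helpers below are the per-register spill/restore callbacks used
// when live registers must be saved around a slow-path call. Each one handles
// a single register and returns how many bytes of the spill area it consumed
// (a full 64-bit slot, kArm64WordSize, in all four cases).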
size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

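// A rough sketch of what the type inference below produces: moving w1 to w0
// with dst_type kPrimInt emits `mov w0, w1`; loading a double stack slot into
// d0 with an unspecified type emits a 64-bit `ldr d0, [sp, #offset]`; and a
// core-to-FP move such as x1 to d0 goes through `fmov d0, x1`.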
void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

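// Plain (non-atomic) load dispatch. Sub-word types select the correctly
// extending load: Ldrb zero-extends (boolean), Ldrsb sign-extends (byte),
// Ldrh zero-extends (char, which is unsigned in Java), and Ldrsh sign-extends
// (short). Word-sized and larger types use an Ldr of the matching width.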
void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

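// Load with acquire semantics, used for volatile reads. Two AArch64
// constraints shape this code: Ldar/Ldarb/Ldarh only accept a plain base
// register (no offset or index), so the full address is materialized into a
// temporary first; and there are no sign-extending acquire loads, so byte and
// short values need an explicit Sbfx after the zero-extending Ldarb/Ldarh.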
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

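// Store with release semantics, the counterpart of LoadAcquire. The
// Stlr/Stlrb/Stlrh instructions likewise only accept a plain base register,
// and they have no FP forms, so FP values are first moved to a core register
// with Fmov.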
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

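// With the InnerShareable domain used below, the barrier types map to:
//   BarrierAll    -> dmb ish    (full barrier)
//   BarrierReads  -> dmb ishld  (orders loads before subsequent loads/stores)
//   BarrierWrites -> dmb ishst  (orders stores before subsequent stores)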
void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

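// A suspend check polls the 16-bit thread flags field; any non-zero value
// (suspend or checkpoint request) diverts execution to the slow path, which
// calls into the runtime.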
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the rotation distance is in the same size register as the
          // result. If we are rotating a long and the distance originally comes
          // in a W register, we do not need to sxtw it for use as an X register,
          // since the distance is always masked with (reg_bits - 1).
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

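// Java shift semantics mask the shift distance to 5 bits (int) or 6 bits
// (long). Immediate distances are masked explicitly below; register distances
// need no And, since the register-shift variants of AArch64 Lsl/Asr/Lsr
// already take the distance modulo the register size.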
void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = (type == Primitive::kPrimInt)
            ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
            : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

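// Maps the merged "operation with negated right operand" IR onto the
// dedicated AArch64 instructions: kAnd -> Bic (a & ~b), kOr -> Orn (a | ~b),
// and kXor -> Eon (a ^ ~b).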
void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = (type == Primitive::kPrimInt)
      ? static_cast<uint32_t>(instruction->GetShiftAmount() & kMaxIntShiftValue)
      : static_cast<uint32_t>(instruction->GetShiftAmount() & kMaxLongShiftValue);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
    HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

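// Multiply-accumulate selection: kAdd uses Madd (res = acc + l * r), kSub
// uses Msub (res = acc - l * r), and kSub with a constant-zero accumulator
// degenerates to Mneg (res = -(l * r)), for which no accumulator register is
// allocated.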
void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

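// Element address computation is data_offset + (index << component_size_shift):
// a constant index is folded into the immediate offset, while a variable index
// uses a scaled register offset from a temporary holding `array + data_offset`
// (or directly from the HArm64IntermediateAddress result when that add has
// been hoisted out by the instruction simplifier).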
2067void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002068 Primitive::Type type = instruction->GetType();
2069 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002070 LocationSummary* locations = instruction->GetLocations();
2071 Location index = locations->InAt(1);
2072 uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Roland Levillain44015862016-01-22 11:47:17 +00002073 Location out = locations->Out();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002074
Alexandre Ramesd921d642015-04-16 15:07:16 +01002075 MacroAssembler* masm = GetVIXLAssembler();
2076 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002077 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002078 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002079
Roland Levillain44015862016-01-22 11:47:17 +00002080 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2081 // Object ArrayGet with Baker's read barrier case.
2082 Register temp = temps.AcquireW();
2083 // The read barrier instrumentation does not support the
2084 // HArm64IntermediateAddress instruction yet.
2085 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2086 // Note that a potential implicit null check is handled in the
2087 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2088 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2089 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002090 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002091 // General case.
2092 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002093 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002094 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2095 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002096 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002097 Register temp = temps.AcquireSameSizeAs(obj);
2098 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2099 // The read barrier instrumentation does not support the
2100 // HArm64IntermediateAddress instruction yet.
2101 DCHECK(!kEmitCompilerReadBarrier);
2102 // We do not need to compute the intermediate address from the array: the
2103 // input instruction has done it already. See the comment in
2104 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2105 if (kIsDebugBuild) {
2106 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2107 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2108 }
2109 temp = obj;
2110 } else {
2111 __ Add(temp, obj, offset);
2112 }
2113 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2114 }
2115
2116 codegen_->Load(type, OutputCPURegister(instruction), source);
2117 codegen_->MaybeRecordImplicitNullCheck(instruction);
2118
2119 if (type == Primitive::kPrimNot) {
2120 static_assert(
2121 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2122 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2123 Location obj_loc = locations->InAt(0);
2124 if (index.IsConstant()) {
2125 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2126 } else {
2127 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2128 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002129 }
Roland Levillain4d027112015-07-01 15:41:14 +01002130 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002131}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(needs_write_barrier);
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    vixl::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
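
// Note (illustration only): branching on `hs` (unsigned higher-or-same) lets a
// single comparison cover both failure modes. A negative index, reinterpreted
// as unsigned, is larger than any valid array length, so `index u>= length`
// catches `index < 0` as well as `index >= length`.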

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}
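
// Illustration (not part of the generated code): comparing an input in s0
// against a +0.0f/-0.0f constant uses the immediate form of FCMP, otherwise
// the register form:
//   fcmp s0, #0.0   // rhs is a floating-point zero constant
//   fcmp s0, s1     // rhs in a register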

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left >  right
  // -1 if: left <  right
  switch (in_type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
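
// Worked example (illustration only): for left = 3, right = 7, the integer
// sequence above produces -1 (a sketch, assuming w1 = left, w2 = right,
// w0 = result):
//   cmp  w1, w2       // 3 - 7 sets N, so both ne and lt hold
//   cset w0, ne       // w0 = 1  (operands differ)
//   cneg w0, w0, lt   // w0 = -1 (left < right)
// For equal operands cset leaves 0 and cneg keeps it; for left > right only
// ne holds, so the result stays +1.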

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }        \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
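
// Worked example (illustration only): `x / 8` with x = -20. Java division
// rounds towards zero, while a plain `asr #3` would round towards -infinity,
// so negative dividends are first biased by abs_imm - 1 = 7 (a sketch,
// assuming w0 = x and w16 as the scratch register):
//   add  w16, w0, #7      // w16 = -13
//   cmp  w0, #0
//   csel w0, w16, w0, lt  // pick the biased value since x < 0
//   asr  w0, w0, #3       // -13 >> 3 = -2, not -3
// The remainder path likewise adds a sign-derived bias before masking the low
// bits and then subtracts it again, so `x % 8` keeps the sign of x
// (e.g. -20 % 8 = -4).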

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
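
// Worked example (illustration only; constants as tabulated in Hacker's
// Delight): for a 32-bit `x / 7`, CalculateMagicAndShiftForDivRem yields
// magic = 0x92492493 (negative as an int32_t) and shift = 2, so the division
// comes out roughly as (a sketch, assuming w0 = x, w16 as scratch):
//   mov   w16, #0x92492493
//   smull x16, w0, w16           // 64-bit product
//   lsr   x16, x16, #32          // keep the high half
//   add   w16, w16, w0           // imm > 0 and magic < 0: add dividend back
//   asr   w16, w16, #2
//   sub   w0, w16, w16, asr #31  // add 1 when the intermediate is negative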

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::Label* true_target,
                                                          vixl::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  vixl::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.immediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
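
// Illustration (not part of the generated code): for `if (x < 0)` where only
// the false target is explicit (pattern 1 above), the opposite condition `ge`
// is selected, and since the comparison is against zero it folds to a single
// sign-bit test (a sketch, assuming w0 = x):
//   tbz w0, #31, false_target  // branch when bit 31 is clear, i.e. x >= 0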

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

enum SelectVariant {
  kCsel,
  kCselFalseConst,
  kCselTrueConst,
  kFcsel,
};

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline bool IsRecognizedCselConstant(HInstruction* constant) {
  if (constant->IsConstant()) {
    int64_t value = Int64FromConstant(constant->AsConstant());
    if ((value == -1) || (value == 0) || (value == 1)) {
      return true;
    }
  }
  return false;
}

static inline SelectVariant GetSelectVariant(HSelect* select) {
  if (Primitive::IsFloatingPointType(select->GetType())) {
    return kFcsel;
  } else if (IsRecognizedCselConstant(select->GetFalseValue())) {
    return kCselFalseConst;
  } else if (IsRecognizedCselConstant(select->GetTrueValue())) {
    return kCselTrueConst;
  } else {
    return kCsel;
  }
}

static inline bool HasSwappedInputs(SelectVariant variant) {
  return variant == kCselTrueConst;
}

static inline Condition GetConditionForSelect(HCondition* condition, SelectVariant variant) {
  IfCondition cond = HasSwappedInputs(variant) ? condition->GetOppositeCondition()
                                               : condition->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  switch (GetSelectVariant(select)) {
    case kCsel:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselFalseConst:
      locations->SetInAt(0, Location::ConstantLocation(select->InputAt(0)->AsConstant()));
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselTrueConst:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::ConstantLocation(select->InputAt(1)->AsConstant()));
      locations->SetOut(Location::RequiresRegister());
      break;
    case kFcsel:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister());
      break;
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  SelectVariant variant = GetSelectVariant(select);
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Condition codes set from previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = HasSwappedInputs(variant) ? eq : ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  }

  switch (variant) {
    case kCsel:
    case kCselFalseConst:
      __ Csel(OutputRegister(select),
              InputRegisterAt(select, 1),
              InputOperandAt(select, 0),
              csel_cond);
      break;
    case kCselTrueConst:
      __ Csel(OutputRegister(select),
              InputRegisterAt(select, 0),
              InputOperandAt(select, 1),
              csel_cond);
      break;
    case kFcsel:
      __ Fcsel(OutputFPRegister(select),
               InputFPRegisterAt(select, 1),
               InputFPRegisterAt(select, 0),
               csel_cond);
      break;
  }
}
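
// Illustration (not part of the generated code): `int r = (a > b) ? a : 0;`
// has a recognized false constant (0), so GetSelectVariant picks
// kCselFalseConst and the result is a single conditional select (a sketch,
// assuming w0 = a, w1 = b):
//   cmp  w0, w1
//   csel w0, w0, wzr, gt  // r = (a > b) ? a : 0
// Only -1, 0 and 1 are recognized because csinv, the zero register and csinc
// can encode them without materializing the constant.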

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  codegen_->MaybeRecordNativeDebugInfo(info, info->GetDexPc());
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers to the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003270 //
3271 // TODO: Introduce a new runtime entry point taking the object
3272 // to test (instead of its class) as argument, and let it deal
3273 // with the read barrier issues. This will let us refactor this
3274 // case of the `switch` code as it was previously (with a direct
3275 // call to the runtime not using a type checking slow path).
3276 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003277 DCHECK(locations->OnlyCallsOnSlowPath());
3278 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3279 /* is_fatal */ false);
3280 codegen_->AddSlowPath(slow_path);
3281 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003282 if (zero.IsLinked()) {
3283 __ B(&done);
3284 }
3285 break;
3286 }
3287 }
3288
3289 if (zero.IsLinked()) {
3290 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003291 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003292 }
3293
3294 if (done.IsLinked()) {
3295 __ Bind(&done);
3296 }
3297
3298 if (slow_path != nullptr) {
3299 __ Bind(slow_path->GetExitLabel());
3300 }
3301}
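
// Editor's sketch (illustrative only, not emitted verbatim): assuming the
// result register `out` is w0 and the checked class `cls` is in w1, the
// kClassHierarchyCheck path above is shaped roughly like:
//
//   loop:
//     cmp   w0, w1           // current class vs. checked class
//     b.eq  success
//     ldr   w0, [x0, #N]     // w0 = w0->super_class_; N is symbolic here
//     cbnz  w0, loop
//     b     done             // out == 0 at this point, i.e. "false"
//   success:
//     mov   w0, #1
//   done:
//
// With read barriers enabled, the super_class_ load goes through
// GenerateReferenceLoadOneRegister and is more than a single LDR.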

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCodeARM64* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
                                                          is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  // /* HeapReference<Class> */ temp = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::Label loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ Cbnz(temp, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      // Before doing so, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      vixl::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      // Before doing so, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      vixl::Label check_non_primitive_component_type;
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ Cbnz(temp, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      // Before doing so, move the object's class back into `temp`, as
      // it has been overwritten in the meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbz(temp, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers to the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}
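
// Editor's illustration (hypothetical Java input): a cast such as
//
//   AbstractList<?> list = (AbstractList<?>) obj;
//
// against an abstract class takes the kAbstractClassCheck path above: since
// no object can be an instance of the abstract class itself, the loop skips
// the initial comparison and starts directly from the super class of `obj`'s
// class; reaching a null super class falls into TypeCheckSlowPathARM64,
// which throws the ClassCastException.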

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will
  // contain the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
      invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  BlockPoolsScope block_pools(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
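
// Editor's sketch of the dispatch emitted above (illustrative; registers are
// allocator-chosen and offsets depend on the mirror::Class layout):
//
//   mov  ip1, #dex_method_index     // hidden argument for the IMT conflict
//                                   // trampoline
//   ldr  wT, [x_receiver, #0]       // temp = receiver->klass_
//   ldr  xT, [xT, #imt_entry_off]   // temp = klass->embedded_imt_[index]
//   ldr  lr, [xT, #entry_point_off]
//   blr  lr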

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // For better instruction scheduling we load the direct code pointer before the method pointer.
  bool direct_code_loaded = false;
  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
      // LR = code address from literal pool with link-time patch.
      __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
      direct_code_loaded = true;
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
      direct_code_loaded = true;
      break;
    default:
      break;
  }

  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Load method address from literal pool with a link-time patch.
      __ Ldr(XRegisterFrom(temp),
             DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      vixl::Label* pc_insn_label = &pc_relative_dex_cache_patches_.back().label;
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(pc_insn_label);
        __ adrp(XRegisterFrom(temp), 0);
      }
      pc_relative_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      // Add LDR with its PC-relative DexCache access patch.
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(&pc_relative_dex_cache_patches_.back().label);
        __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), 0));
        pc_relative_dex_cache_patches_.back().pc_insn_label = pc_insn_label;
      }
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = XRegisterFrom(temp);
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = XRegisterFrom(current_method);
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
      }

      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ Ldr(reg.X(),
             MemOperand(method_reg.X(),
                        ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
      // temp = temp[index_in_cache];
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Bl(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      vixl::Label* label = &relative_call_patches_.back().label;
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(label);
      __ bl(0);  // Branch and link to itself. This will be overridden at link time.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR prepared above for better instruction scheduling.
      DCHECK(direct_code_loaded);
      // lr()
      __ Blr(lr);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
      // lr()
      __ Blr(lr);
      break;
  }

  DCHECK(!IsLeafMethod());
}
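
// Editor's note: the kDexCachePcRelative case above emits an ADRP/LDR pair
// whose immediate fields are zero here and are filled in by the linker via
// the two recorded DexCacheArrayPatch entries:
//
//   adrp xN, 0          // patched: 4KiB page of the dex cache array element
//   ldr  xN, [xN, #0]   // patched: the element's offset within that page
//
// Each instruction is emitted under a SingleEmissionCheckScope so that its
// label marks exactly one instruction for the patcher.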

void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  BlockPoolsScope block_pools(GetVIXLAssembler());

  DCHECK(receiver.IsRegister());
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
}
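
// Editor's sketch (illustrative): with the receiver in the first argument
// register of the managed calling convention and `temp` standing for the
// allocated temporary, the virtual dispatch above is roughly:
//
//   ldr  wT, [x_receiver, #0]         // temp = receiver->klass_
//   ldr  xT, [xT, #vtable_entry_off]  // temp = klass->embedded_vtable_[index]
//   ldr  lr, [xT, #entry_point_off]
//   blr  lr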

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    vixl::Literal<uint64_t>* literal = entry.second;
    linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
                                                              &info.target_dex_file,
                                                              info.pc_insn_label->location(),
                                                              info.element_offset));
  }
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  // Look up the literal for value.
  auto lb = uint64_literals_.lower_bound(value);
  if (lb != uint64_literals_.end() && !uint64_literals_.key_comp()(value, lb->first)) {
    return lb->second;
  }
  // We don't have a literal for this value, insert a new one.
  vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(value);
  uint64_literals_.PutBefore(lb, value, literal);
  return literal;
}
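
// Editor's note: the lower_bound()/key_comp() pattern above is the standard
// std::map insert-with-hint idiom: a single lookup serves both the hit and
// the miss path. E.g. two kCallDirect invokes of the same target both reach
// DeduplicateUint64Literal() with the same address and share one
// vixl::Literal, so the constant is emitted once in the literal pool.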

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
    MethodReference target_method,
    MethodToLiteralMap* map) {
  // Look up the literal for target_method.
  auto lb = map->lower_bound(target_method);
  if (lb != map->end() && !map->key_comp()(target_method, lb->first)) {
    return lb->second;
  }
  // We don't have a literal for this method yet, insert a new one.
  vixl::Literal<uint64_t>* literal = __ CreateLiteralDestroyedWithPool<uint64_t>(0u);
  map->PutBefore(lb, target_method, literal);
  return literal;
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}

vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
    MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}

void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
3866
Alexandre Rames67555f72014-11-18 10:55:16 +00003867void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003868 InvokeRuntimeCallingConvention calling_convention;
3869 CodeGenerator::CreateLoadClassLocationSummary(
3870 cls,
3871 LocationFrom(calling_convention.GetRegisterAt(0)),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003872 LocationFrom(vixl::x0),
3873 /* code_generator_supports_read_barrier */ true);
Alexandre Rames67555f72014-11-18 10:55:16 +00003874}
3875
3876void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003877 if (cls->NeedsAccessCheck()) {
3878 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3879 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3880 cls,
3881 cls->GetDexPc(),
3882 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003883 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003884 return;
3885 }
3886
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003887 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003888 Register out = OutputRegister(cls);
3889 Register current_method = InputRegisterAt(cls, 0);
3890 if (cls->IsReferrersClass()) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003891 DCHECK(!cls->CanCallRuntime());
3892 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain44015862016-01-22 11:47:17 +00003893 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3894 GenerateGcRootFieldLoad(
3895 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Alexandre Rames67555f72014-11-18 10:55:16 +00003896 } else {
Vladimir Marko05792b92015-08-03 11:56:49 +01003897 MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003898 // /* GcRoot<mirror::Class>[] */ out =
3899 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
Vladimir Marko05792b92015-08-03 11:56:49 +01003900 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00003901 // /* GcRoot<mirror::Class> */ out = out[type_index]
3902 GenerateGcRootFieldLoad(
3903 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003904
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003905 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
3906 DCHECK(cls->CanCallRuntime());
3907 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
3908 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3909 codegen_->AddSlowPath(slow_path);
3910 if (!cls->IsInDexCache()) {
3911 __ Cbz(out, slow_path->GetEntryLabel());
3912 }
3913 if (cls->MustGenerateClinitCheck()) {
3914 GenerateClassInitializationCheck(slow_path, out);
3915 } else {
3916 __ Bind(slow_path->GetExitLabel());
3917 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003918 }
3919 }
3920}
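
// Editor's sketch (illustrative, no read barriers, symbolic offsets): the
// non-referrer fast path above is essentially
//
//   ldr x0, [x_method, #resolved_types_off]   // GcRoot<Class>[] backing array
//   ldr w0, [x0, #(type_index * root_size)]   // out = out[type_index]
//   cbz w0, slow_path                         // not resolved yet
//
// with LoadClassSlowPathARM64 resolving the type (and running the clinit
// check when required) before branching back to the exit label.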

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}
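
// Editor's note: both visitors above address the same thread-local slot
// through `tr`, the reserved ART thread register, so each operation is a
// single instruction, roughly:
//
//   ldr wN,  [tr, #exception_off]   // HLoadException
//   str wzr, [tr, #exception_off]   // HClearException (store zero register)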

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  Location out_loc = load->GetLocations()->Out();
  Register out = OutputRegister(load);
  Register current_method = InputRegisterAt(load, 0);

  // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
  GenerateGcRootFieldLoad(
      load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
  __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
  // /* GcRoot<mirror::String> */ out = out[string_index]
  GenerateGcRootFieldLoad(
      load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));

  if (!load->IsInDexCache()) {
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(out, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
                              ? QUICK_ENTRY_POINT(pLockObject)
                              : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}
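
// Editor's illustration (hypothetical Java input): a block such as
//
//   synchronized (obj) { ... }
//
// is built as a pair of HMonitorOperation instructions, each lowered above
// to a runtime call (pLockObject / pUnlockObject) that receives `obj` in
// the first register of the runtime calling convention.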

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
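
// Editor's note: the implicit check above is a single `ldr wzr, [obj]`.
// When `obj` is null the load faults, and the runtime's fault handler maps
// the faulting PC back to this instruction via the recorded stack map and
// throws NullPointerException; the fast path emits no compare or branch.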
4205
4206void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004207 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
4208 codegen_->AddSlowPath(slow_path);
4209
4210 LocationSummary* locations = instruction->GetLocations();
4211 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004212
4213 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004214}
4215
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004216void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004217 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004218 GenerateImplicitNullCheck(instruction);
4219 } else {
4220 GenerateExplicitNullCheck(instruction);
4221 }
4222}
4223
Alexandre Rames67555f72014-11-18 10:55:16 +00004224void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4225 HandleBinaryOp(instruction);
4226}
4227
4228void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4229 HandleBinaryOp(instruction);
4230}
4231
Alexandre Rames3e69f162014-12-10 10:36:50 +00004232void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4233 LOG(FATAL) << "Unreachable";
4234}
4235
4236void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4237 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4238}
4239
Alexandre Rames5319def2014-10-23 10:03:10 +01004240void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4241 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4242 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4243 if (location.IsStackSlot()) {
4244 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4245 } else if (location.IsDoubleStackSlot()) {
4246 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4247 }
4248 locations->SetOut(location);
4249}
4250
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004251void InstructionCodeGeneratorARM64::VisitParameterValue(
4252 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004253 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004254}
4255
4256void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4257 LocationSummary* locations =
4258 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
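      // ARM64 has no floating-point remainder instruction, so fall back to
      // the fmodf/fmod runtime entry points.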
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
      UNREACHABLE();
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
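  // pDeliverException unwinds to the catch handler and never returns here.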
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
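      // char is the only unsigned integral type, so conversions to char, and
      // widenings from char, zero-extend with Ubfx instead of sign-extending.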
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
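    // Fcvtzs rounds toward zero, saturates on overflow and maps NaN to zero,
    // which matches the Java semantics of float-to-integral conversions.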
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Assume at most 16 generated instructions per HIR instruction on average.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the
  // graph to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
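    // Subs both biases the value and sets the flags consumed by the first
    // branch below; each loop iteration then handles two consecutive cases.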
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = new (GetGraph()->GetArena()) JumpTableARM64(switch_instr);
    codegen_->AddJumpTable(jump_table);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two
    // blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // The current VIXL implementation does not require any blocked registers to encode the
    // immediate value for Adr, so we are free to use both VIXL blocked registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            vixl::Register obj,
                                                            uint32_t offset) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      __ Ldr(root_reg, MemOperand(obj, offset));
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Add(root_reg.X(), obj.X(), offset);
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ Ldr(root_reg, MemOperand(obj, offset));
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, offset, no_index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, data_offset, index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   vixl::Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If `index` is a valid location, then we are emitting an array
  // load, so we shouldn't be using a Load Acquire instruction.
  // In other words: `index.IsValid()` => `!use_load_acquire`.
  DCHECK(!index.IsValid() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");
  // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
  __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
  __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
  static_assert(
      LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
      "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");

  // Introduce a dependency on the high bits of rb_state, which shall
  // be all zeroes, to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
  Register temp2 = temps.AcquireW();
  __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
  // obj is unchanged by this operation, but its value now depends on
  // temp2, which depends on temp.
  __ Add(obj, obj, Operand(temp2));
  temps.Release(temp2);

  // The actual reference load.
  if (index.IsValid()) {
    static_assert(
        sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
        "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
    // /* HeapReference<Object> */ ref =
    //     *(obj + offset + index * sizeof(HeapReference<Object>))
    const size_t shift_amount = Primitive::ComponentSizeShift(type);
    if (index.IsConstant()) {
      uint32_t computed_offset = offset + (Int64ConstantFrom(index) << shift_amount);
      Load(type, ref_reg, HeapOperand(obj, computed_offset));
    } else {
      temp2 = temps.AcquireW();
      __ Add(temp2, obj, offset);
      Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, shift_amount));
      temps.Release(temp2);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  __ Cmp(temp, ReadBarrier::gray_ptr_);
  __ B(eq, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
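  // Compute the offset of the requested method entry in either the class's
  // embedded vtable or its embedded interface method table (IMT).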
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
  } else {
    method_offset = mirror::Class::EmbeddedImTableEntryOffset(
        instruction->GetIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  }
  __ Ldr(XRegisterFrom(locations->Out()),
         MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art