/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
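
// Sanity-checking the threshold above with the numbers from the comment (illustrative
// arithmetic only, not used by the code): at num_entries == 7 the compare/jump sequence costs
// about 1.5 * 7 + 3, i.e. roughly 14 instructions, while the jump table costs 7 instructions
// plus 7 in-line literals, also about 14 words of code/data. So 7 is close to the break-even
// point between the two strategies.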

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
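
// Reading the gt_bias selectors above (our interpretation, based on the ARMv8 flag encodings;
// no extra code is generated): an unordered FP compare (a NaN operand) sets the flags so that
// the conditions annotated `/* unordered */` evaluate to true. For kCondLT with gt_bias, NaN
// must not count as "less than", so `cc` is chosen, which is false for unordered results;
// without gt_bias, `lt` is chosen, which is true for unordered results.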

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory operand used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating point registers' spill area).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
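
// The spill-area layout produced by the helper above, as we read it (a sketch, not normative
// documentation):
//
//   base + spill_offset                   : live caller-save core registers (core_list)
//   base + spill_offset + core_spill_size : live caller-save FP registers (fp_list)
//
// When the load/store pair offset is out of range, `base` is rebased onto the start of the FP
// area and `spill_offset` becomes -core_spill_size, so the same two MemOperands still address
// both sub-areas.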

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check.)
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated the jump table with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
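
// To make the emitted encoding concrete (a sketch of our reading; the consuming dispatch code
// lives outside this excerpt): for a 7-entry switch the table is a contiguous array of 7 int32
// literals, where entry i holds the byte distance from `table_start_` to the label of
// `successors[i]`. A dispatcher can load entry i and add it to the table's address to reach the
// target block; the DCHECKs above only guarantee that each distance fits in 32 bits.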

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair; the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};
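
// A worked example of the array-index rewriting above (illustrative only): for an HArrayGet of
// reference element 5, `index_reg` starts as 5, the Lsl by ComponentSizeShift(kPrimNot) == 2
// scales it to 20, and the Add folds in `offset_` (the array data offset supplied by the
// caller), so pReadBarrierSlow receives a plain byte offset in its third argument.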

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
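
// How this plays out for a concrete signature (a hand-worked example, not extra code): for
// arguments (int, float, long), the int takes the first GP argument register, the float takes
// the first FP argument register, and the long takes the second GP register; meanwhile
// stack_index_ advances by 1, 1 and 2 vreg slots respectively, because stack space is reserved
// for every argument even when it is passed in a register.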

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}
890
Serban Constantinescu579885a2015-02-22 20:51:33 +0000891CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
892 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100893 const CompilerOptions& compiler_options,
894 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +0100895 : CodeGenerator(graph,
896 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000897 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000898 kNumberOfAllocatableRegisterPairs,
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000899 callee_saved_core_registers.list(),
Nicolas Geoffray75d5b9b2015-10-05 07:40:35 +0000900 callee_saved_fp_registers.list(),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100901 compiler_options,
902 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +0100903 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +0800904 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +0100905 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000906 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +0000907 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +0100908 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +0000909 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000910 uint32_literals_(std::less<uint32_t>(),
911 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +0100912 uint64_literals_(std::less<uint64_t>(),
913 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
914 method_patches_(MethodReferenceComparator(),
915 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
916 call_patches_(MethodReferenceComparator(),
917 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
918 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000919 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
920 boot_image_string_patches_(StringReferenceValueComparator(),
921 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
922 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
923 boot_image_address_patches_(std::less<uint32_t>(),
924 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000925 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000926 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000927}
Alexandre Rames5319def2014-10-23 10:03:10 +0100928
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so one GPR and one FPR VIXL temp are always available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}
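
// Cross-referencing SetupBlockedRegisters() below (our annotation, no behavior change): the
// scratch locations handed out here come from the VIXL temp pools, i.e. ip0/ip1 for core
// registers and d31 for FP registers, which is exactly why those registers are blocked from
// the register allocator.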

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8] : lr.
    //   ...                : other preserved core registers.
    //   ...                : other preserved fp registers.
    //   ...                : reserved frame space.
    //   sp[0]              : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
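
// A note on the frame allocation above (our annotation): the pre-indexed store
// `MemOperand(sp, -frame_size, PreIndex)` decrements sp by frame_size and stores
// kArtMethodRegister at the new sp in a single instruction, so allocating the frame and
// writing the current method to sp[0] do not need separate instructions.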

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

Calin Juravle175dc732015-08-25 15:42:32 +01001064void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1065 DCHECK(location.IsRegister());
1066 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1067}
1068
Calin Juravlee460d1d2015-09-29 04:52:17 +01001069void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1070 if (location.IsRegister()) {
1071 locations->AddTemp(location);
1072 } else {
1073 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1074 }
1075}
1076
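// About the card marking sequence in MarkGCCard below: the card table base
// loaded from the thread is biased (see art::gc::accounting::CardTable) so
// that `biased_base + (address >> kCardShift)` addresses the card for
// `address`, and the dirty-card value is chosen to equal the low byte of the
// biased base itself. A single Strb of the `card` register therefore both
// forms the card address and supplies the dirty value (illustrative sketch):
//   Ldr  card, [tr, #card_table_offset]  // card = biased card table base.
//   Lsr  temp, object, #kCardShift       // temp = card index.
//   Strb card, [card, temp]              // card[temp] = low byte of base.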
void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  // lr       : Runtime reserved.
  // tr       : Runtime reserved.
  // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  // ip1      : VIXL core temp.
  // ip0      : VIXL core temp.
  //
  // Blocked fp registers:
  // d31 : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

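// Acquire loads: LDAR and its variants only accept a plain base register, so
// LoadAcquire first materializes the full address into a scratch register.
// ARMv8.0 also has no sign-extending acquire load, hence signed byte/short
// loads are an LDARB/LDARH followed by an SBFX, roughly (illustrative
// registers):
//   Add   x16, base, #offset
//   Ldarb w0, [x16]
//   Sbfx  w0, w0, #0, #8  // Sign-extend the loaded byte.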
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

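// Release stores: as with LoadAcquire above, STLR only takes a plain base
// register, so the address is computed into a scratch register first.
// ARMv8.0 has no FP/SIMD form of STLR, which is why the FP paths below move
// the value through a same-sized core register before the Stlr.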
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

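// Runtime calls go through the per-thread entrypoint table: the target is
// loaded from a fixed offset off the thread register (tr) and called with
// Blr, and the pc is then recorded so the runtime can map the return address
// back to a dex pc and, if applicable, to a slow path.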
void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

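// The class status is loaded with acquire semantics so that values written
// by the initializing thread before it published the kStatusInitialized
// status are visible here. The `lt` branch relies on the ordering of the
// status enum: every status below kStatusInitialized means initialization is
// not (yet) complete and the slow path must be taken.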
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

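// Note the conservative mapping below: kAnyStore is emitted as a full
// barrier (BarrierAll) rather than a cheaper variant, since the AArch64 DMB
// options (ISH / ISHLD / ISHST) cannot express "all prior accesses before
// subsequent stores" more precisely than a full DMB ISH.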
void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

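// Suspend checks poll the 16-bit thread flags word: a non-zero value means a
// suspend or checkpoint request is pending and the slow path calls into the
// runtime. On back edges (`successor != nullptr`) the fast path branches
// straight to the loop header instead of falling through.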
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in the same size register as the
          // result. If we are rotating a long and the distance comes in a w
          // register originally, we do not need to sxtw it for use as an x
          // register: all shift distances are masked with (reg_bits - 1)
          // anyway.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

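// For constant distances, Java shift semantics are applied at code
// generation time: the distance is masked with 31 (int) or 63 (long). For
// instance, an int `x << 35` becomes `Lsl w0, w1, #3` since 35 & 31 == 3
// (illustrative registers). For register distances the masking is left to
// the hardware, which already takes the distance modulo the register size.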
void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.immediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

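// HBitwiseNegatedRight maps onto AArch64's negated-operand logical
// instructions (semantics sketch):
//   Bic dst, lhs, rhs  // dst = lhs & ~rhs
//   Orn dst, lhs, rhs  // dst = lhs | ~rhs
//   Eon dst, lhs, rhs  // dst = lhs ^ ~rhs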
void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

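// HArm64DataProcWithShifterOp folds a shift or extension into the second
// operand of a data-processing instruction, so that e.g. `a + (b << 5)` can
// be emitted as a single `Add x0, x1, x2, LSL #5` (illustrative registers)
// instead of a separate shift followed by an add.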
void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

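// HArm64IntermediateAddress materializes `array + data_offset` once so that
// several accesses to the same array can share it; each access then only
// applies the scaled index, e.g. `Ldr w0, [temp, x1, LSL #2]` for an int
// array (illustrative registers). See the comment in
// `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.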
void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
    HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
  Location out = locations->Out();

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
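
// Illustrative sketch, not emitted verbatim (register choice is hypothetical):
// for a plain array the length read above is a single 32-bit load, e.g.
//   __ Ldr(w0, HeapOperand(x1, 8));  // assuming the length field sits right
//                                    // after the 8-byte object header
// and that same load doubles as the implicit null check recorded below it.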

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(needs_write_barrier);
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    vixl::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
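
// Illustrative sketch (hypothetical registers; the real operands come from
// the LocationSummary): for `objArray[i] = o` where `o` may be null, the
// code above first takes a cheap null path,
//   __ Cbnz(w2, &non_zero);    // type check needed only for non-null values
//   __ Str(wzr, destination);  // storing null never fails the type check
//   __ B(&done);
//   __ Bind(&non_zero);
// and only then falls through to the klass_/component_type_ comparison.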

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
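
// Illustrative note: a single unsigned compare covers both failure modes of
// `a[i]`. Roughly (register names hypothetical),
//   __ Cmp(w_index, w_length);
//   __ B(slow_path->GetEntryLabel(), hs);  // taken if index >= length (unsigned)
// A negative index reinterprets as a huge unsigned value, so `hs` also
// catches index < 0 without a second compare.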

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}
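
// Illustrative sketch (hypothetical register): with the zero constant kept in
// the instruction, a comparison like `f == 0.0f` becomes
//   __ Fcmp(s0, 0.0);  // FCMP's #0.0 immediate form
// instead of first materializing the zero in a scratch FP register.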

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
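
// Worked example of the branchless integer sequence above, for something like
// Long.compare(x, y) (register names hypothetical):
//   __ Cmp(x0, x1);
//   __ Cset(w2, ne);       // w2 = (x != y) ? 1 : 0
//   __ Cneg(w2, w2, lt);   // w2 = (x < y) ? -w2 : w2
// which yields -1/0/+1 in three instructions with no branches.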

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}
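
// Illustrative note: `x % 1` and `x % -1` are always 0, and `x / -1` is plain
// negation, e.g.
//   __ Neg(w0, w1);  // x / -1; wraps for Integer.MIN_VALUE, as Java requires
// so this case never needs a division instruction.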

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
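
// Worked example (div): for `x / 8` the rounding adjustment above expands to
//   __ Add(temp, dividend, 7);         // bias only has an effect when x < 0
//   __ Cmp(dividend, 0);
//   __ Csel(out, temp, dividend, lt);  // use the biased value for x < 0
//   __ Asr(out, out, 3);               // arithmetic shift by log2(8)
// e.g. -9 becomes (-9 + 7) >> 3 = -1, matching Java's truncation toward zero.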

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
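
// Worked example (constants as given by the usual magic-number derivation;
// registers hypothetical): for a 32-bit `x / 7`,
// CalculateMagicAndShiftForDivRem should produce magic = 0x92492493 (negative)
// and shift = 2, making the emitted sequence roughly
//   __ Mov(w16, 0x92492493);
//   __ Smull(x16, w0, w16);
//   __ Lsr(x16, x16, 32);                    // high half of the product
//   __ Add(w16, w16, w0);                    // imm > 0 && magic < 0
//   __ Asr(w16, w16, 2);
//   __ Sub(w1, w16, Operand(w16, ASR, 31));  // +1 when the quotient is negative
// trading one sdiv for cheaper multiply/shift arithmetic.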

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}
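
// Illustrative note: ARM64 has no integer remainder instruction, so for a
// non-constant divisor `x % y` is derived from the quotient, as above:
//   __ Sdiv(temp, dividend, divisor);       // temp = x / y (truncated)
//   __ Msub(out, temp, divisor, dividend);  // out  = x - temp * y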

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}
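
// Illustrative note: with a compile-time divisor the check folds away (or is
// an unconditional jump for a literal 0); otherwise it is a single instruction
// (register name hypothetical):
//   __ Cbz(w1, slow_path->GetEntryLabel());  // slow path throws ArithmeticException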

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::Label* true_target,
                                                          vixl::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
  vixl::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.immediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (as in case 2) and we only need an unconditional jump to
  // `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

enum SelectVariant {
  kCsel,
  kCselFalseConst,
  kCselTrueConst,
  kFcsel,
};

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline bool IsRecognizedCselConstant(HInstruction* constant) {
  if (constant->IsConstant()) {
    int64_t value = Int64FromConstant(constant->AsConstant());
    if ((value == -1) || (value == 0) || (value == 1)) {
      return true;
    }
  }
  return false;
}

static inline SelectVariant GetSelectVariant(HSelect* select) {
  if (Primitive::IsFloatingPointType(select->GetType())) {
    return kFcsel;
  } else if (IsRecognizedCselConstant(select->GetFalseValue())) {
    return kCselFalseConst;
  } else if (IsRecognizedCselConstant(select->GetTrueValue())) {
    return kCselTrueConst;
  } else {
    return kCsel;
  }
}

static inline bool HasSwappedInputs(SelectVariant variant) {
  return variant == kCselTrueConst;
}

static inline Condition GetConditionForSelect(HCondition* condition, SelectVariant variant) {
  IfCondition cond = HasSwappedInputs(variant) ? condition->GetOppositeCondition()
                                               : condition->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  switch (GetSelectVariant(select)) {
    case kCsel:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselFalseConst:
      locations->SetInAt(0, Location::ConstantLocation(select->InputAt(0)->AsConstant()));
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    case kCselTrueConst:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::ConstantLocation(select->InputAt(1)->AsConstant()));
      locations->SetOut(Location::RequiresRegister());
      break;
    case kFcsel:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister());
      break;
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  SelectVariant variant = GetSelectVariant(select);
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Condition codes set from previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = HasSwappedInputs(variant) ? eq : ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition(), variant);
  }

  switch (variant) {
    case kCsel:
    case kCselFalseConst:
      __ Csel(OutputRegister(select),
              InputRegisterAt(select, 1),
              InputOperandAt(select, 0),
              csel_cond);
      break;
    case kCselTrueConst:
      __ Csel(OutputRegister(select),
              InputRegisterAt(select, 0),
              InputOperandAt(select, 1),
              csel_cond);
      break;
    case kFcsel:
      __ Fcsel(OutputFPRegister(select),
               InputFPRegisterAt(select, 1),
               InputFPRegisterAt(select, 0),
               csel_cond);
      break;
  }
}
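
// Worked example (hypothetical registers): `x > 0 ? a : b` with the condition
// emitted at its use site compiles to a branchless pair,
//   __ Cmp(w0, 0);
//   __ Csel(w3, w1, w2, gt);  // w3 = (x > 0) ? a : b
// The -1/0/+1 constants are special-cased above presumably because the
// macro-assembler can fold them into csinv/csinc forms against the zero
// register instead of materializing the constant first.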

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
3138
3139void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003140 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003141 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003142 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003143 Register obj = InputRegisterAt(instruction, 0);
3144 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003145 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003146 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003147 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3148 locations->GetTemp(0) :
3149 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003150 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3151 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3152 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3153 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003154
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003155 vixl::Label done, zero;
3156 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003157
3158 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003159 // Avoid null check if we know `obj` is not null.
3160 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003161 __ Cbz(obj, &zero);
3162 }
3163
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003164 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003165 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003166
Roland Levillain44015862016-01-22 11:47:17 +00003167 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003168 case TypeCheckKind::kExactCheck: {
3169 __ Cmp(out, cls);
3170 __ Cset(out, eq);
3171 if (zero.IsLinked()) {
3172 __ B(&done);
3173 }
3174 break;
3175 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003176
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003177 case TypeCheckKind::kAbstractClassCheck: {
3178 // If the class is abstract, we eagerly fetch the super class of the
3179 // object to avoid doing a comparison we know will fail.
3180 vixl::Label loop, success;
3181 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003182 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003183 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003184 // If `out` is null, we use it for the result, and jump to `done`.
3185 __ Cbz(out, &done);
3186 __ Cmp(out, cls);
3187 __ B(ne, &loop);
3188 __ Mov(out, 1);
3189 if (zero.IsLinked()) {
3190 __ B(&done);
3191 }
3192 break;
3193 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003194
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003195 case TypeCheckKind::kClassHierarchyCheck: {
3196 // Walk over the class hierarchy to find a match.
3197 vixl::Label loop, success;
3198 __ Bind(&loop);
3199 __ Cmp(out, cls);
3200 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003201 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003202 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003203 __ Cbnz(out, &loop);
3204 // If `out` is null, we use it for the result, and jump to `done`.
3205 __ B(&done);
3206 __ Bind(&success);
3207 __ Mov(out, 1);
3208 if (zero.IsLinked()) {
3209 __ B(&done);
3210 }
3211 break;
3212 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003213
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003214 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003215 // Do an exact check.
3216 vixl::Label exact_check;
3217 __ Cmp(out, cls);
3218 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003219 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003220 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003221 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003222 // If `out` is null, we use it for the result, and jump to `done`.
3223 __ Cbz(out, &done);
3224 __ Ldrh(out, HeapOperand(out, primitive_offset));
3225 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3226 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003227 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003228 __ Mov(out, 1);
3229 __ B(&done);
3230 break;
3231 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003232
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003233 case TypeCheckKind::kArrayCheck: {
3234 __ Cmp(out, cls);
3235 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003236 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3237 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003238 codegen_->AddSlowPath(slow_path);
3239 __ B(ne, slow_path->GetEntryLabel());
3240 __ Mov(out, 1);
3241 if (zero.IsLinked()) {
3242 __ B(&done);
3243 }
3244 break;
3245 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003246
Calin Juravle98893e12015-10-02 21:05:03 +01003247 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003248 case TypeCheckKind::kInterfaceCheck: {
3249 // Note that we indeed only call on slow path, but we always go
3250 // into the slow path for the unresolved and interface check
3251 // cases.
3252 //
3253 // We cannot directly call the InstanceofNonTrivial runtime
3254 // entry point without resorting to a type checking slow path
3255 // here (i.e. by calling InvokeRuntime directly), as it would
3256 // require to assign fixed registers for the inputs of this
3257 // HInstanceOf instruction (following the runtime calling
3258 // convention), which might be cluttered by the potential first
3259 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003260 //
3261 // TODO: Introduce a new runtime entry point taking the object
3262 // to test (instead of its class) as argument, and let it deal
3263 // with the read barrier issues. This will let us refactor this
3264 // case of the `switch` code as it was previously (with a direct
3265 // call to the runtime not using a type checking slow path).
3266 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003267 DCHECK(locations->OnlyCallsOnSlowPath());
3268 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3269 /* is_fatal */ false);
3270 codegen_->AddSlowPath(slow_path);
3271 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003272 if (zero.IsLinked()) {
3273 __ B(&done);
3274 }
3275 break;
3276 }
3277 }
3278
3279 if (zero.IsLinked()) {
3280 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003281 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003282 }
3283
3284 if (done.IsLinked()) {
3285 __ Bind(&done);
3286 }
3287
3288 if (slow_path != nullptr) {
3289 __ Bind(slow_path->GetExitLabel());
3290 }
3291}
3292
3293void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3294 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3295 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3296
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003297 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3298 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003299 case TypeCheckKind::kExactCheck:
3300 case TypeCheckKind::kAbstractClassCheck:
3301 case TypeCheckKind::kClassHierarchyCheck:
3302 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003303 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3304 LocationSummary::kCallOnSlowPath :
3305 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003306 break;
3307 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003308 case TypeCheckKind::kUnresolvedCheck:
3309 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003310 call_kind = LocationSummary::kCallOnSlowPath;
3311 break;
3312 }
3313
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003314 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3315 locations->SetInAt(0, Location::RequiresRegister());
3316 locations->SetInAt(1, Location::RequiresRegister());
3317 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3318 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003319 // When read barriers are enabled, we need an additional temporary
3320 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003321 if (TypeCheckNeedsATemporary(type_check_kind)) {
3322 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003323 }
3324}
3325
3326void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003327 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003328 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003329 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003330 Register obj = InputRegisterAt(instruction, 0);
3331 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003332 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003333 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3334 locations->GetTemp(1) :
3335 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003336 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003337 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3338 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3339 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3340 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003341
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003342 bool is_type_check_slow_path_fatal =
3343 (type_check_kind == TypeCheckKind::kExactCheck ||
3344 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3345 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3346 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3347 !instruction->CanThrowIntoCatchBlock();
3348 SlowPathCodeARM64* type_check_slow_path =
3349 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3350 is_type_check_slow_path_fatal);
3351 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003352
3353 vixl::Label done;
3354 // Avoid null check if we know obj is not null.
3355 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003356 __ Cbz(obj, &done);
3357 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003358
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003359 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003360 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003361
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003362 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003363 case TypeCheckKind::kExactCheck:
3364 case TypeCheckKind::kArrayCheck: {
3365 __ Cmp(temp, cls);
3366 // Jump to slow path for throwing the exception or doing a
3367 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003368 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003369 break;
3370 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003371
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003372 case TypeCheckKind::kAbstractClassCheck: {
3373 // If the class is abstract, we eagerly fetch the super class of the
3374 // object to avoid doing a comparison we know will fail.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003375 vixl::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003376 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003377 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003378 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003379
3380 // If the class reference currently in `temp` is not null, jump
3381 // to the `compare_classes` label to compare it with the checked
3382 // class.
3383 __ Cbnz(temp, &compare_classes);
3384 // Otherwise, jump to the slow path to throw the exception.
3385 //
3386 // But before, move back the object's class into `temp` before
3387 // going into the slow path, as it has been overwritten in the
3388 // meantime.
3389 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003390 GenerateReferenceLoadTwoRegisters(
3391 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003392 __ B(type_check_slow_path->GetEntryLabel());
3393
3394 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003395 __ Cmp(temp, cls);
3396 __ B(ne, &loop);
3397 break;
3398 }
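      // Sketch of the equivalent logic, including the reload of `temp`
      // that the (throwing) slow path expects:
      //   do {
      //     temp = temp->super_class_;
      //     if (temp == nullptr) { temp = obj->klass_; goto slow_path; }
      //   } while (temp != cls);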
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003399
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003400 case TypeCheckKind::kClassHierarchyCheck: {
3401 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003402 vixl::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003403 __ Bind(&loop);
3404 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003405 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003407 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003408 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003409
3410 // If the class reference currently in `temp` is not null, jump
3411 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003412 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003413 // Otherwise, jump to the slow path to throw the exception.
3414 //
3415 // But before, move back the object's class into `temp` before
3416 // going into the slow path, as it has been overwritten in the
3417 // meantime.
3418 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003419 GenerateReferenceLoadTwoRegisters(
3420 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003421 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003422 break;
3423 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003424
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003425 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003426 // Do an exact check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003427 vixl::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003428 __ Cmp(temp, cls);
3429 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003430
3431 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003432 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003433 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003434
3435 // If the component type is not null (i.e. the object is indeed
3436 // an array), jump to label `check_non_primitive_component_type`
3437 // to further check that this component type is not a primitive
3438 // type.
3439 __ Cbnz(temp, &check_non_primitive_component_type);
3440 // Otherwise, jump to the slow path to throw the exception.
3441 //
3442 // But before, move back the object's class into `temp` before
3443 // going into the slow path, as it has been overwritten in the
3444 // meantime.
3445 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003446 GenerateReferenceLoadTwoRegisters(
3447 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003448 __ B(type_check_slow_path->GetEntryLabel());
3449
3450 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003451 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3452 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003453 __ Cbz(temp, &done);
3454 // Same comment as above regarding `temp` and the slow path.
3455 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003456 GenerateReferenceLoadTwoRegisters(
3457 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003458 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003459 break;
3460 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003461
Calin Juravle98893e12015-10-02 21:05:03 +01003462 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003463 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003464 // We always go into the type check slow path for the unresolved
3465 // and interface check cases.
3466 //
3467 // We cannot directly call the CheckCast runtime entry point
3468 // without resorting to a type checking slow path here (i.e. by
3469 // calling InvokeRuntime directly), as it would require
3470 // assigning fixed registers for the inputs of this HCheckCast
3471 // instruction (following the runtime calling convention), which
3472 // might be cluttered by the potential first read barrier
3473 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003474 //
3475 // TODO: Introduce a new runtime entry point taking the object
3476 // to test (instead of its class) as argument, and let it deal
3477 // with the read barrier issues. This will let us refactor this
3478 // case of the `switch` code as it was previously (with a direct
3479 // call to the runtime not using a type checking slow path).
3480 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003481 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003482 break;
3483 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003484 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003485
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003486 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003487}
3488
Alexandre Rames5319def2014-10-23 10:03:10 +01003489void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3490 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3491 locations->SetOut(Location::ConstantLocation(constant));
3492}
3493
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003494void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003495 // Will be generated at use site.
3496}
3497
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003498void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3499 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3500 locations->SetOut(Location::ConstantLocation(constant));
3501}
3502
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003503void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003504 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003505}
3506
Calin Juravle175dc732015-08-25 15:42:32 +01003507void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3508 // The trampoline uses the same calling convention as dex calling conventions,
3509 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3510 // the method_idx.
3511 HandleInvoke(invoke);
3512}
3513
3514void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3515 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3516}
3517
Alexandre Rames5319def2014-10-23 10:03:10 +01003518void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003519 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003520 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003521}
3522
Alexandre Rames67555f72014-11-18 10:55:16 +00003523void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3524 HandleInvoke(invoke);
3525}
3526
3527void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3528 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003529 LocationSummary* locations = invoke->GetLocations();
3530 Register temp = XRegisterFrom(locations->GetTemp(0));
Mathieu Chartiere401d142015-04-22 13:56:20 -07003531 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
3532 invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003533 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003534 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003535 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003536
3537 // The register ip1 is required to be used for the hidden argument in
3538 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003539 MacroAssembler* masm = GetVIXLAssembler();
3540 UseScratchRegisterScope scratch_scope(masm);
3541 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003542 scratch_scope.Exclude(ip1);
3543 __ Mov(ip1, invoke->GetDexMethodIndex());
3544
Alexandre Rames67555f72014-11-18 10:55:16 +00003545 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003546 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003547 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003548 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003549 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003550 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003551 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003552 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003553 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003554 // Instead of simply (possibly) unpoisoning `temp` here, we should
3555 // emit a read barrier for the previous class reference load.
3556 // However this is not required in practice, as this is an
3557 // intermediate/temporary reference and because the current
3558 // concurrent copying collector keeps the from-space memory
3559 // intact/accessible until the end of the marking phase (the
3560 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003561 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Alexandre Rames67555f72014-11-18 10:55:16 +00003562 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003563 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003564 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003565 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003566 // lr();
3567 __ Blr(lr);
3568 DCHECK(!codegen_->IsLeafMethod());
3569 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3570}
3571
3572void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003573 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3574 if (intrinsic.TryDispatch(invoke)) {
3575 return;
3576 }
3577
Alexandre Rames67555f72014-11-18 10:55:16 +00003578 HandleInvoke(invoke);
3579}
3580
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003581void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003582 // Explicit clinit checks triggered by static invokes must have been pruned by
3583 // art::PrepareForRegisterAllocation.
3584 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003585
Andreas Gampe878d58c2015-01-15 23:24:00 -08003586 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3587 if (intrinsic.TryDispatch(invoke)) {
3588 return;
3589 }
3590
Alexandre Rames67555f72014-11-18 10:55:16 +00003591 HandleInvoke(invoke);
3592}
3593
Andreas Gampe878d58c2015-01-15 23:24:00 -08003594static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3595 if (invoke->GetLocations()->Intrinsified()) {
3596 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3597 intrinsic.Dispatch(invoke);
3598 return true;
3599 }
3600 return false;
3601}
3602
Vladimir Markodc151b22015-10-15 18:02:30 +01003603HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3604 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3605 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003606 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003607 return desired_dispatch_info;
3608}
3609
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003610void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003611 // For better instruction scheduling we load the direct code pointer before the method pointer.
3612 bool direct_code_loaded = false;
3613 switch (invoke->GetCodePtrLocation()) {
3614 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3615 // LR = code address from literal pool with link-time patch.
3616 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3617 direct_code_loaded = true;
3618 break;
3619 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3620 // LR = invoke->GetDirectCodePtr();
3621 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3622 direct_code_loaded = true;
3623 break;
3624 default:
3625 break;
3626 }
3627
Andreas Gampe878d58c2015-01-15 23:24:00 -08003628 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003629 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3630 switch (invoke->GetMethodLoadKind()) {
3631 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3632 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003633 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003634 break;
3635 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003636 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003637 break;
3638 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3639 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003640 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003641 break;
3642 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3643 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003644 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003645 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3646 break;
3647 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3648 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003649 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3650 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
3651 vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003652 {
3653 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003654 __ Bind(adrp_label);
3655 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003656 }
Vladimir Marko58155012015-08-19 12:49:41 +00003657 // Add LDR with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003658 vixl::Label* ldr_label =
3659 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003660 {
3661 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003662 __ Bind(ldr_label);
3663 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003664 }
Vladimir Marko58155012015-08-19 12:49:41 +00003665 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003666 }
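      // Once the linker fills in both patches, the pair above is expected
      // to look like this sketch (xT stands for the temp register):
      //   adrp xT, <4KiB page containing the DexCache element>
      //   ldr  xT, [xT, #<offset of the element within that page>]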
Vladimir Marko58155012015-08-19 12:49:41 +00003667 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003668 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003669 Register reg = XRegisterFrom(temp);
3670 Register method_reg;
3671 if (current_method.IsRegister()) {
3672 method_reg = XRegisterFrom(current_method);
3673 } else {
3674 DCHECK(invoke->GetLocations()->Intrinsified());
3675 DCHECK(!current_method.IsValid());
3676 method_reg = reg;
3677 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3678 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003679
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003680 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003681 __ Ldr(reg.X(),
3682 MemOperand(method_reg.X(),
3683 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003684 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003685 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3686 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003687 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3688 break;
3689 }
3690 }
3691
3692 switch (invoke->GetCodePtrLocation()) {
3693 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3694 __ Bl(&frame_entry_label_);
3695 break;
3696 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3697 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
3698 vixl::Label* label = &relative_call_patches_.back().label;
Alexandre Rames6dc01742015-11-12 14:44:19 +00003699 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
3700 __ Bind(label);
3701 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003702 break;
3703 }
3704 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3705 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3706 // LR prepared above for better instruction scheduling.
3707 DCHECK(direct_code_loaded);
3708 // lr()
3709 __ Blr(lr);
3710 break;
3711 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3712 // LR = callee_method->entry_point_from_quick_compiled_code_;
3713 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003714 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003715 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3716 // lr()
3717 __ Blr(lr);
3718 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003719 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003720
Andreas Gampe878d58c2015-01-15 23:24:00 -08003721 DCHECK(!IsLeafMethod());
3722}
3723
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003724void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003725 // Use the calling convention instead of the location of the receiver, as
3726 // intrinsics may have put the receiver in a different register. In the intrinsics
3727 // slow path, the arguments have been moved to the right place, so here we are
3728 // guaranteed that the receiver is the first register of the calling convention.
3729 InvokeDexCallingConvention calling_convention;
3730 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003731 Register temp = XRegisterFrom(temp_in);
3732 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3733 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3734 Offset class_offset = mirror::Object::ClassOffset();
3735 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3736
3737 BlockPoolsScope block_pools(GetVIXLAssembler());
3738
3739 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003741 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003742 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003743 // Instead of simply (possibly) unpoisoning `temp` here, we should
3744 // emit a read barrier for the previous class reference load.
// However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003745 // intermediate/temporary reference and because the current
3746 // concurrent copying collector keeps the from-space memory
3747 // intact/accessible until the end of the marking phase (the
3748 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003749 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3750 // temp = temp->GetMethodAt(method_offset);
3751 __ Ldr(temp, MemOperand(temp, method_offset));
3752 // lr = temp->GetEntryPoint();
3753 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3754 // lr();
3755 __ Blr(lr);
3756}
3757
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003758vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
3759 uint32_t string_index,
3760 vixl::Label* adrp_label) {
3761 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3762}
3763
3764vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
3765 uint32_t element_offset,
3766 vixl::Label* adrp_label) {
3767 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3768}
3769
3770vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
3771 uint32_t offset_or_index,
3772 vixl::Label* adrp_label,
3773 ArenaDeque<PcRelativePatchInfo>* patches) {
3774 // Add a patch entry and return the label.
3775 patches->emplace_back(dex_file, offset_or_index);
3776 PcRelativePatchInfo* info = &patches->back();
3777 vixl::Label* label = &info->label;
3778 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3779 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3780 return label;
3781}
3782
3783vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
3784 const DexFile& dex_file, uint32_t string_index) {
3785 return boot_image_string_patches_.GetOrCreate(
3786 StringReference(&dex_file, string_index),
3787 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3788}
3789
3790vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
3791 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3792 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3793 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3794}
3795
3796vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
3797 return DeduplicateUint64Literal(address);
3798}
3799
Vladimir Marko58155012015-08-19 12:49:41 +00003800void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3801 DCHECK(linker_patches->empty());
3802 size_t size =
3803 method_patches_.size() +
3804 call_patches_.size() +
3805 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003806 pc_relative_dex_cache_patches_.size() +
3807 boot_image_string_patches_.size() +
3808 pc_relative_string_patches_.size() +
3809 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003810 linker_patches->reserve(size);
3811 for (const auto& entry : method_patches_) {
3812 const MethodReference& target_method = entry.first;
3813 vixl::Literal<uint64_t>* literal = entry.second;
3814 linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
3815 target_method.dex_file,
3816 target_method.dex_method_index));
3817 }
3818 for (const auto& entry : call_patches_) {
3819 const MethodReference& target_method = entry.first;
3820 vixl::Literal<uint64_t>* literal = entry.second;
3821 linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
3822 target_method.dex_file,
3823 target_method.dex_method_index));
3824 }
3825 for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003826 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003827 info.target_method.dex_file,
3828 info.target_method.dex_method_index));
3829 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003830 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003831 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003832 &info.target_dex_file,
Alexandre Rames6dc01742015-11-12 14:44:19 +00003833 info.pc_insn_label->location(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003834 info.offset_or_index));
3835 }
3836 for (const auto& entry : boot_image_string_patches_) {
3837 const StringReference& target_string = entry.first;
3838 vixl::Literal<uint32_t>* literal = entry.second;
3839 linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
3840 target_string.dex_file,
3841 target_string.string_index));
3842 }
3843 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
3844 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
3845 &info.target_dex_file,
3846 info.pc_insn_label->location(),
3847 info.offset_or_index));
3848 }
3849 for (const auto& entry : boot_image_address_patches_) {
3850 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
3851 vixl::Literal<uint32_t>* literal = entry.second;
3852 linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003853 }
3854}
3855
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003856vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
3857 Uint32ToLiteralMap* map) {
3858 return map->GetOrCreate(
3859 value,
3860 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3861}
3862
Vladimir Marko58155012015-08-19 12:49:41 +00003863vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003864 return uint64_literals_.GetOrCreate(
3865 value,
3866 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003867}
3868
3869vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
3870 MethodReference target_method,
3871 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003872 return map->GetOrCreate(
3873 target_method,
3874 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003875}
3876
3877vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
3878 MethodReference target_method) {
3879 return DeduplicateMethodLiteral(target_method, &method_patches_);
3880}
3881
3882vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
3883 MethodReference target_method) {
3884 return DeduplicateMethodLiteral(target_method, &call_patches_);
3885}
3886
3887
Andreas Gampe878d58c2015-01-15 23:24:00 -08003888void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003889 // Explicit clinit checks triggered by static invokes must have been pruned by
3890 // art::PrepareForRegisterAllocation.
3891 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003892
Andreas Gampe878d58c2015-01-15 23:24:00 -08003893 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3894 return;
3895 }
3896
Alexandre Ramesd921d642015-04-16 15:07:16 +01003897 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003898 LocationSummary* locations = invoke->GetLocations();
3899 codegen_->GenerateStaticOrDirectCall(
3900 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003901 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003902}
3903
3904void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003905 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3906 return;
3907 }
3908
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003909 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003910 DCHECK(!codegen_->IsLeafMethod());
3911 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3912}
3913
Alexandre Rames67555f72014-11-18 10:55:16 +00003914void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003915 InvokeRuntimeCallingConvention calling_convention;
3916 CodeGenerator::CreateLoadClassLocationSummary(
3917 cls,
3918 LocationFrom(calling_convention.GetRegisterAt(0)),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003919 LocationFrom(vixl::x0),
3920 /* code_generator_supports_read_barrier */ true);
Alexandre Rames67555f72014-11-18 10:55:16 +00003921}
3922
3923void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003924 if (cls->NeedsAccessCheck()) {
3925 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3926 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3927 cls,
3928 cls->GetDexPc(),
3929 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003930 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003931 return;
3932 }
3933
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003934 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003935 Register out = OutputRegister(cls);
3936 Register current_method = InputRegisterAt(cls, 0);
3937 if (cls->IsReferrersClass()) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003938 DCHECK(!cls->CanCallRuntime());
3939 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain44015862016-01-22 11:47:17 +00003940 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3941 GenerateGcRootFieldLoad(
3942 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Alexandre Rames67555f72014-11-18 10:55:16 +00003943 } else {
Vladimir Marko05792b92015-08-03 11:56:49 +01003944 MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003945 // /* GcRoot<mirror::Class>[] */ out =
3946 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
Vladimir Marko05792b92015-08-03 11:56:49 +01003947 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00003948 // /* GcRoot<mirror::Class> */ out = out[type_index]
3949 GenerateGcRootFieldLoad(
3950 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003951
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003952 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
3953 DCHECK(cls->CanCallRuntime());
3954 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
3955 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3956 codegen_->AddSlowPath(slow_path);
3957 if (!cls->IsInDexCache()) {
3958 __ Cbz(out, slow_path->GetEntryLabel());
3959 }
3960 if (cls->MustGenerateClinitCheck()) {
3961 GenerateClassInitializationCheck(slow_path, out);
3962 } else {
3963 __ Bind(slow_path->GetExitLabel());
3964 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003965 }
3966 }
3967}
3968
David Brazdilcb1c0552015-08-04 16:22:25 +01003969static MemOperand GetExceptionTlsAddress() {
3970 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
3971}
3972
Alexandre Rames67555f72014-11-18 10:55:16 +00003973void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
3974 LocationSummary* locations =
3975 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3976 locations->SetOut(Location::RequiresRegister());
3977}
3978
3979void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01003980 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
3981}
3982
3983void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
3984 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
3985}
3986
3987void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
3988 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00003989}
3990
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003991HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
3992 HLoadString::LoadKind desired_string_load_kind) {
3993 if (kEmitCompilerReadBarrier) {
3994 switch (desired_string_load_kind) {
3995 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3996 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3997 case HLoadString::LoadKind::kBootImageAddress:
3998 // TODO: Implement for read barrier.
3999 return HLoadString::LoadKind::kDexCacheViaMethod;
4000 default:
4001 break;
4002 }
4003 }
4004 switch (desired_string_load_kind) {
4005 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4006 DCHECK(!GetCompilerOptions().GetCompilePic());
4007 break;
4008 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4009 DCHECK(GetCompilerOptions().GetCompilePic());
4010 break;
4011 case HLoadString::LoadKind::kBootImageAddress:
4012 break;
4013 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004014 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004015 break;
4016 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004017 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004018 break;
4019 case HLoadString::LoadKind::kDexCacheViaMethod:
4020 break;
4021 }
4022 return desired_string_load_kind;
4023}
4024
Alexandre Rames67555f72014-11-18 10:55:16 +00004025void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004026 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004027 ? LocationSummary::kCallOnSlowPath
4028 : LocationSummary::kNoCall;
4029 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004030 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4031 locations->SetInAt(0, Location::RequiresRegister());
4032 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004033 locations->SetOut(Location::RequiresRegister());
4034}
4035
4036void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004037 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004038 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004039
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004040 switch (load->GetLoadKind()) {
4041 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4042 DCHECK(!kEmitCompilerReadBarrier);
4043 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4044 load->GetStringIndex()));
4045 return; // No dex cache slow path.
4046 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4047 DCHECK(!kEmitCompilerReadBarrier);
4048 // Add ADRP with its PC-relative String patch.
4049 const DexFile& dex_file = load->GetDexFile();
4050 uint32_t string_index = load->GetStringIndex();
4051 vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
4052 {
4053 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4054 __ Bind(adrp_label);
4055 __ adrp(out.X(), /* offset placeholder */ 0);
4056 }
4057 // Add ADD with its PC-relative String patch.
4058 vixl::Label* add_label =
4059 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4060 {
4061 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4062 __ Bind(add_label);
4063 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4064 }
4065 return; // No dex cache slow path.
4066 }
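    // After linking, the patched ADRP/ADD pair computes the String's boot
    // image address (sketch):
    //   adrp x_out, <4KiB page containing the String object>
    //   add  x_out, x_out, #<low 12 bits of its address>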
4067 case HLoadString::LoadKind::kBootImageAddress: {
4068 DCHECK(!kEmitCompilerReadBarrier);
4069 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4070 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4071 return; // No dex cache slow path.
4072 }
4073 case HLoadString::LoadKind::kDexCacheAddress: {
4074 DCHECK_NE(load->GetAddress(), 0u);
4075 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4076 // that gives a 16KiB range. To reduce the number of literals when we load
4077 // multiple strings, simply split the dex cache address into a 16KiB-aligned base
4078 // loaded from a literal and the remaining offset embedded in the load.
4079 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4080 DCHECK_ALIGNED(load->GetAddress(), 4u);
4081 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4082 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4083 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
4084 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4085 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4086 break;
4087 }
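    // Worked example with an illustrative address: for
    // load->GetAddress() == 0x12345678 and offset_bits == 14,
    //   base_address = 0x12345678 & ~0x3fff = 0x12344000   // One shared literal.
    //   offset       = 0x12345678 &  0x3fff = 0x1678       // Encoded in the LDR.
    // Strings whose GC roots share a 16KiB page thus reuse the same literal.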
4088 case HLoadString::LoadKind::kDexCachePcRelative: {
4089 // Add ADRP with its PC-relative DexCache access patch.
4090 const DexFile& dex_file = load->GetDexFile();
4091 uint32_t element_offset = load->GetDexCacheElementOffset();
4092 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4093 {
4094 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4095 __ Bind(adrp_label);
4096 __ adrp(out.X(), /* offset placeholder */ 0);
4097 }
4098 // Add LDR with its PC-relative DexCache access patch.
4099 vixl::Label* ldr_label =
4100 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4101 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4102 break;
4103 }
4104 case HLoadString::LoadKind::kDexCacheViaMethod: {
4105 Register current_method = InputRegisterAt(load, 0);
4106 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4107 GenerateGcRootFieldLoad(
4108 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4109 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4110 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4111 // /* GcRoot<mirror::String> */ out = out[string_index]
4112 GenerateGcRootFieldLoad(
4113 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4114 break;
4115 }
4116 default:
4117 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4118 UNREACHABLE();
4119 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004120
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004121 if (!load->IsInDexCache()) {
4122 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4123 codegen_->AddSlowPath(slow_path);
4124 __ Cbz(out, slow_path->GetEntryLabel());
4125 __ Bind(slow_path->GetExitLabel());
4126 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004127}
4128
Alexandre Rames5319def2014-10-23 10:03:10 +01004129void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4130 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4131 locations->SetOut(Location::ConstantLocation(constant));
4132}
4133
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004134void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004135 // Will be generated at use site.
4136}
4137
Alexandre Rames67555f72014-11-18 10:55:16 +00004138void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4139 LocationSummary* locations =
4140 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4141 InvokeRuntimeCallingConvention calling_convention;
4142 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4143}
4144
4145void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4146 codegen_->InvokeRuntime(instruction->IsEnter()
4147 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4148 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004149 instruction->GetDexPc(),
4150 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004151 if (instruction->IsEnter()) {
4152 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4153 } else {
4154 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4155 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004156}
4157
Alexandre Rames42d641b2014-10-27 14:00:51 +00004158void LocationsBuilderARM64::VisitMul(HMul* mul) {
4159 LocationSummary* locations =
4160 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4161 switch (mul->GetResultType()) {
4162 case Primitive::kPrimInt:
4163 case Primitive::kPrimLong:
4164 locations->SetInAt(0, Location::RequiresRegister());
4165 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004166 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004167 break;
4168
4169 case Primitive::kPrimFloat:
4170 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004171 locations->SetInAt(0, Location::RequiresFpuRegister());
4172 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004173 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004174 break;
4175
4176 default:
4177 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4178 }
4179}
4180
4181void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4182 switch (mul->GetResultType()) {
4183 case Primitive::kPrimInt:
4184 case Primitive::kPrimLong:
4185 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4186 break;
4187
4188 case Primitive::kPrimFloat:
4189 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004190 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004191 break;
4192
4193 default:
4194 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4195 }
4196}
4197
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004198void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4199 LocationSummary* locations =
4200 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4201 switch (neg->GetResultType()) {
4202 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004203 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004204 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004205 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004206 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004207
4208 case Primitive::kPrimFloat:
4209 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004210 locations->SetInAt(0, Location::RequiresFpuRegister());
4211 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004212 break;
4213
4214 default:
4215 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4216 }
4217}
4218
4219void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4220 switch (neg->GetResultType()) {
4221 case Primitive::kPrimInt:
4222 case Primitive::kPrimLong:
4223 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4224 break;
4225
4226 case Primitive::kPrimFloat:
4227 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004228 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004229 break;
4230
4231 default:
4232 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4233 }
4234}
4235
4236void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4237 LocationSummary* locations =
4238 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4239 InvokeRuntimeCallingConvention calling_convention;
4240 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004241 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004242 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004243 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004244}
4245
4246void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4247 LocationSummary* locations = instruction->GetLocations();
4248 InvokeRuntimeCallingConvention calling_convention;
4249 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4250 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004251 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004252 // Note: if heap poisoning is enabled, the entry point takes care
4253 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004254 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4255 instruction,
4256 instruction->GetDexPc(),
4257 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004258 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004259}
4260
Alexandre Rames5319def2014-10-23 10:03:10 +01004261void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4262 LocationSummary* locations =
4263 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4264 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004265 if (instruction->IsStringAlloc()) {
4266 locations->AddTemp(LocationFrom(kArtMethodRegister));
4267 } else {
4268 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4269 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4270 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004271 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4272}
4273
4274void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004275 // Note: if heap poisoning is enabled, the entry point takes cares
4276 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004277 if (instruction->IsStringAlloc()) {
4278 // String is allocated through StringFactory. Call NewEmptyString entry point.
4279 Location temp = instruction->GetLocations()->GetTemp(0);
4280 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
4281 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4282 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4283 __ Blr(lr);
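    // This call bypasses InvokeRuntime, which would normally record the PC
    // info for the stack map, so it is recorded manually below.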
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
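  // A Java boolean is materialized as 0 or 1, so XOR-ing the low bit with 1
  // implements logical negation in a single instruction.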
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
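  // Probe the object header by loading into the zero register: the loaded
  // value is discarded, but a null reference faults, and the fault handler
  // turns the SIGSEGV into a NullPointerException.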
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
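  // Arguments passed on the stack live in the caller's frame, so bias their
  // slots by our own frame size to address them relative to the current SP.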
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
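      // There is no ARM64 instruction for floating-point remainder, so this
      // defers to the fmodf/fmod runtime entry points.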
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
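      // `char` is the only unsigned primitive type, so conversions to char,
      // and widening conversions from char, zero-extend the 16-bit value with
      // an unsigned bitfield extract; the remaining narrow cases below
      // sign-extend with Sbfx instead.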
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
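    // Fcvtzs rounds toward zero and saturates on overflow (NaN converts to
    // zero), which matches Java's narrowing conversion semantics.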
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly assume a maximum average of 16 instructions generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the
  // graph to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
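  // With 4-byte A64 instructions this works out to 1 MB / (16 * 4 B), i.e.
  // 16384 HIRs.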

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
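    // Each iteration handles two consecutive case values: after the
    // `Subs #2`, `lo` (borrow set) means the value matched the odd offset
    // (last_index + 1), while `eq` means it matched the even offset
    // (last_index + 2).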
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two
    // blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // The current VIXL implementation does not require any blocked registers to encode the
    // immediate value for Adr, so we are free to use both VIXL blocked registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
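    // Each table entry is a 32-bit offset relative to the table start, so the
    // index is scaled by 4 for the load (UXTW #2) and the loaded offset is
    // sign-extended (SXTW) when forming the target address.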
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            vixl::Register obj,
                                                            uint32_t offset,
                                                            vixl::Label* fixup_label) {
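  // When `fixup_label` is non-null, the load below acts as a placeholder for
  // a PC-relative patch: it must then be emitted as exactly one instruction
  // bound to the label, which the SingleEmissionCheckScope around the raw
  // `ldr`/`add` enforces.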
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      if (fixup_label == nullptr) {
        __ Ldr(root_reg, MemOperand(obj, offset));
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ ldr(root_reg, MemOperand(obj, offset));
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ add(root_reg.X(), obj.X(), offset);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(fixup_label);
      __ ldr(root_reg, MemOperand(obj, offset));
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, offset, no_index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, data_offset, index, temp, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   vixl::Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If `index` is a valid location, then we are emitting an array
  // load, so we shouldn't be using a Load Acquire instruction.
  // In other words: `index.IsValid()` => `!use_load_acquire`.
  DCHECK(!index.IsValid() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");
  // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
  __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
  __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
  static_assert(
      LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
      "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");

  // Introduce a dependency on the high bits of rb_state, which shall
  // be all zeroes, to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
  Register temp2 = temps.AcquireW();
  __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
  // obj is unchanged by this operation, but its value now depends on
  // temp2, which depends on temp.
  __ Add(obj, obj, Operand(temp2));
  temps.Release(temp2);

  // The actual reference load.
  if (index.IsValid()) {
    static_assert(
        sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
        "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
    // /* HeapReference<Object> */ ref =
    //     *(obj + offset + index * sizeof(HeapReference<Object>))
    const size_t shift_amount = Primitive::ComponentSizeShift(type);
    if (index.IsConstant()) {
      uint32_t computed_offset = offset + (Int64ConstantFrom(index) << shift_amount);
      Load(type, ref_reg, HeapOperand(obj, computed_offset));
    } else {
      temp2 = temps.AcquireW();
      __ Add(temp2, obj, offset);
      Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, shift_amount));
      temps.Release(temp2);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  __ Cmp(temp, ReadBarrier::gray_ptr_);
  __ B(eq, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
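  // Virtual dispatch reads the method pointer from the class's embedded
  // vtable; interface dispatch indexes the embedded IMT, with the slot
  // reduced modulo the fixed table size.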
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
  } else {
    method_offset = mirror::Class::EmbeddedImTableEntryOffset(
        instruction->GetIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value();
  }
  __ Ldr(XRegisterFrom(locations->Out()),
         MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
}


#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art