/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
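// As a rough worked example using the estimates above: at num_entries == 7 the compare/jump
// sequence costs about 1.5 * 7 + 3 ~= 14 instructions, while the jump table costs
// 7 instructions plus 7 32-bit literals (~14 words of code/data), so 7 is roughly break-even.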

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
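// For example, a gt_bias float comparison `a < b` maps to `cc` above (the same condition code
// as `lo`), which is false when the operands compare unordered; a NaN input therefore behaves
// as if the comparison result were "greater", matching the dex gt-bias semantics.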

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory operand for saving/restoring live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
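// A note on the rebasing above (a property of the A64 encoding): LDP/STP take a signed 7-bit
// immediate scaled by the access size, so with 8-byte registers a pair access can only reach
// [-512, +504] bytes from its base. Rebasing onto the FP spill area keeps both register lists
// within that window.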

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};
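// (For reference: the three parallel moves above line up with the pAputObject signature checked
// by CheckEntrypointTypes -- array, index and value are expected in calling-convention
// registers 0, 1 and 2 respectively.)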

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
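// Note: each entry placed above is a signed 32-bit offset from table_start_; the packed-switch
// dispatch sequence (emitted when visiting HPackedSwitch) is expected to load an entry with
// Ldrsw, add it to the table's address, and branch to the result.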

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
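        // At this point `index_reg` holds offset_ + (index << 2), i.e. the byte offset of the
        // array element relative to `obj_` -- the value passed to the read barrier entry point
        // as its offset argument (the shift of 2 comes from ComponentSizeShift(kPrimNot), since
        // heap references are 32 bits wide).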
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
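// As an illustration (assuming the usual ART ARM64 managed-code convention, with the current
// method in x0 and argument registers starting at x1/d0): a (int, float, long) signature would
// typically be assigned w1, s0 and x2, while stack_index_ advances by 1, 1 and 2 vreg slots
// respectively.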

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4,
  // and 5 VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR available as VIXL temps
  // to resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
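  // Using Primitive::kPrimVoid here leaves the operand type unspecified; MoveLocation is
  // expected to infer the width of the move from the source and destination locations.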
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
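    // The load below probes one word at (sp - reserved bytes): if this method's frame would
    // overflow the stack, the probe faults on the protected guard region and the runtime's
    // implicit stack-overflow handler turns the fault into a StackOverflowError (RecordPcInfo
    // maps the faulting pc back to this method).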
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

Alexandre Rames5319def2014-10-23 10:03:10 +01001070void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1071 __ Bind(GetLabelOf(block));
1072}
1073
Calin Juravle175dc732015-08-25 15:42:32 +01001074void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1075 DCHECK(location.IsRegister());
1076 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1077}
1078
Calin Juravlee460d1d2015-09-29 04:52:17 +01001079void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1080 if (location.IsRegister()) {
1081 locations->AddTemp(location);
1082 } else {
1083 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1084 }
1085}
1086
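// Card table write barrier: marks the card covering `object` so that the
// garbage collector notices the reference store. When `value_can_be_null` is
// true, the marking is skipped entirely for a null `value`.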
void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

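// Moves `source` into `destination`, picking 32-bit or 64-bit loads, stores
// and register moves based on the two locations and on `dst_type`. Passing
// kPrimVoid as `dst_type` asks for the type to be inferred from the
// locations themselves.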
void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

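// Emits a plain (non-atomic) load of `type` from `src` into `dst`, using the
// zero- or sign-extending load variant matching the Java type width.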
void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

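// Emits a load-acquire for a volatile read. There are no sign-extending
// acquire loads, so byte and short values need an explicit Sbfx after the
// Ldarb/Ldarh, and FP values are loaded through a core scratch register
// since ARM64 has no FP load-acquire.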
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

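// Emits a plain (non-atomic) store of `type` from `src` to `dst`.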
void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

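// Emits a store-release for a volatile write. As in LoadAcquire, FP values
// go through a core scratch register, since ARM64 has no FP store-release
// instruction.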
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

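// Invokes a runtime entry point: the entry point address is loaded from the
// current Thread, called through Blr, and the PC information required for
// the stack map is recorded.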
void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

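// Jumps to `slow_path` if the class in `class_reg` is not yet initialized.
// The status is read with a load-acquire so that everything written by the
// class initializer is visible once the initialized status is observed.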
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

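// Emits a suspend check: tests the thread flags and enters the (shared)
// suspend-check slow path when a suspend request is pending. With a non-null
// `successor` (a loop header), execution continues at `successor` when no
// suspension is requested, and the slow path also returns there.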
void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                                \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                                \
  }                                                                                    \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in a register of the same size as the
          // result. If we are rotating a long and the shift distance originally
          // comes in a W register, we do not need to sxtw it for use as an X
          // register, since shift distances are always taken modulo the
          // register width (& (reg_bits - 1)).
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

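// Shl, Shr and UShr: constant shift distances are masked to the type width
// at compile time; register distances rely on the equivalent masking that
// the hardware shift instructions apply themselves.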
void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.immediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
    HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

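// HMultiplyAccumulate merges a multiply and a following add or sub into a
// single madd/msub, or an mneg when subtracting a product from zero (in
// which case no accumulator register is allocated).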
void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

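// Array element loads. With Baker read barriers, object element loads are
// delegated to GenerateArrayLoadWithBakerReadBarrier; otherwise a plain load
// is emitted (possibly through an HArm64IntermediateAddress base), followed,
// for object elements, by a slow-path read barrier when required.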
2059void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002060 Primitive::Type type = instruction->GetType();
2061 Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  uint32_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
  Location out = locations->Out();

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(needs_write_barrier);
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    vixl::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

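    // Dirty the card table entry covering `array` so the GC rescans it for
    // the reference just stored; when the value can be null, MarkGCCard
    // emits its own null test and skips the card marking for a null store.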
    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

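  // A single unsigned comparison covers both failure modes: the length is
  // never negative, so a negative index compares as a very large unsigned
  // value and also takes the `hs` (unsigned >=) branch below.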
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
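      // For unordered inputs (NaN), FCMP sets the flags so that `ne` holds,
      // leaving `result` at +1; ARM64FPCondition then picks a "less than"
      // condition that is false for NaN under gt bias (result stays +1) and
      // true under lt bias (result is negated to -1).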
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
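    // ARM64FPCondition maps the HIR condition to a VIXL condition whose
    // behavior on unordered (NaN) inputs matches the condition's gt/lt bias.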
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

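  // x % +/-1 is always 0, x / 1 is x, and x / -1 is -x, so no actual
  // division instruction is needed.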
  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

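  // Java integer division truncates toward zero while an arithmetic shift
  // rounds toward negative infinity, so negative dividends are biased by
  // `abs_imm - 1` first. E.g. for -5 / 8, Csel picks -5 + 7 = 2 (since the
  // dividend is negative) and 2 >> 3 = 0, the truncated quotient. The
  // remainder path applies the same bias: -5 % 8 becomes
  // ((-5 + 7) & 7) - 7 = -5.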
  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

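  // Classic fixed-point reciprocal ("magic number") division: take the high
  // half of `dividend * magic`, correct by +/- dividend when `magic` and
  // `imm` have opposite signs, shift right by `shift`, and finally subtract
  // the arithmetic sign word (which adds 1 for a negative intermediate) so
  // the quotient is truncated toward zero.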
  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

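    // Strength-reduce division by a constant: +/-1 and powers of two get
    // dedicated short sequences; any other non-zero constant goes through
    // the fixed-point reciprocal path.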
    if (imm == 0) {
      // Do not generate anything. A DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::Label* true_target,
                                                          vixl::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  vixl::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.immediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // In case (3) the conditional branch to `true_target` has already been
  // emitted above (as in case 2), so we still need the unconditional jump to
  // `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

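  // Csel/Fcsel write the first source when the condition holds, so the true
  // value (input 1) comes first and the false value (input 0) second.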
  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

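// A temporary is only needed for the read barrier instrumentation: always
// under Baker read barriers, and otherwise only for the check kinds below
// that perform more than one reference load while walking the super class
// chain or component type.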
static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

3111void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003112 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003113 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003114 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003115 Register obj = InputRegisterAt(instruction, 0);
3116 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003117 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003118 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003119 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3120 locations->GetTemp(0) :
3121 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003122 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3123 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3124 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3125 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003126
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003127 vixl::Label done, zero;
3128 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003129
3130 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003131 // Avoid null check if we know `obj` is not null.
3132 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003133 __ Cbz(obj, &zero);
3134 }
3135
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003136 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003137 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003138
Roland Levillain44015862016-01-22 11:47:17 +00003139 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003140 case TypeCheckKind::kExactCheck: {
3141 __ Cmp(out, cls);
3142 __ Cset(out, eq);
3143 if (zero.IsLinked()) {
3144 __ B(&done);
3145 }
3146 break;
3147 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003148
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003149 case TypeCheckKind::kAbstractClassCheck: {
3150 // If the class is abstract, we eagerly fetch the super class of the
3151 // object to avoid doing a comparison we know will fail.
3152 vixl::Label loop, success;
3153 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003154 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003155 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003156 // If `out` is null, we use it for the result, and jump to `done`.
3157 __ Cbz(out, &done);
3158 __ Cmp(out, cls);
3159 __ B(ne, &loop);
3160 __ Mov(out, 1);
3161 if (zero.IsLinked()) {
3162 __ B(&done);
3163 }
3164 break;
3165 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003166
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003167 case TypeCheckKind::kClassHierarchyCheck: {
3168 // Walk over the class hierarchy to find a match.
3169 vixl::Label loop, success;
3170 __ Bind(&loop);
3171 __ Cmp(out, cls);
3172 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003173 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003174 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003175 __ Cbnz(out, &loop);
3176 // If `out` is null, we use it for the result, and jump to `done`.
3177 __ B(&done);
3178 __ Bind(&success);
3179 __ Mov(out, 1);
3180 if (zero.IsLinked()) {
3181 __ B(&done);
3182 }
3183 break;
3184 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003185
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003186 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003187 // Do an exact check.
3188 vixl::Label exact_check;
3189 __ Cmp(out, cls);
3190 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003191 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003192 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003193 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003194 // If `out` is null, we use it for the result, and jump to `done`.
3195 __ Cbz(out, &done);
3196 __ Ldrh(out, HeapOperand(out, primitive_offset));
3197 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3198 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003199 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003200 __ Mov(out, 1);
3201 __ B(&done);
3202 break;
3203 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003204
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003205 case TypeCheckKind::kArrayCheck: {
3206 __ Cmp(out, cls);
3207 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003208 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3209 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003210 codegen_->AddSlowPath(slow_path);
3211 __ B(ne, slow_path->GetEntryLabel());
3212 __ Mov(out, 1);
3213 if (zero.IsLinked()) {
3214 __ B(&done);
3215 }
3216 break;
3217 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003218
Calin Juravle98893e12015-10-02 21:05:03 +01003219 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003220 case TypeCheckKind::kInterfaceCheck: {
3221 // Note that we indeed only call on the slow path, but we always go
3222 // into the slow path for the unresolved and interface check
3223 // cases.
3224 //
3225 // We cannot directly call the InstanceofNonTrivial runtime
3226 // entry point without resorting to a type checking slow path
3227 // here (i.e. by calling InvokeRuntime directly), as it would
3228 // require assigning fixed registers for the inputs of this
3229 // HInstanceOf instruction (following the runtime calling
3230 // convention), which might be cluttered by the potential first
3231 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003232 //
3233 // TODO: Introduce a new runtime entry point taking the object
3234 // to test (instead of its class) as argument, and let it deal
3235 // with the read barrier issues. This will let us refactor this
3236 // case of the `switch` code as it was previously (with a direct
3237 // call to the runtime not using a type checking slow path).
3238 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003239 DCHECK(locations->OnlyCallsOnSlowPath());
3240 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3241 /* is_fatal */ false);
3242 codegen_->AddSlowPath(slow_path);
3243 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003244 if (zero.IsLinked()) {
3245 __ B(&done);
3246 }
3247 break;
3248 }
3249 }
3250
3251 if (zero.IsLinked()) {
3252 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003253 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003254 }
3255
3256 if (done.IsLinked()) {
3257 __ Bind(&done);
3258 }
3259
3260 if (slow_path != nullptr) {
3261 __ Bind(slow_path->GetExitLabel());
3262 }
3263}
3264
3265void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3266 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3267 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3268
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003269 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3270 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003271 case TypeCheckKind::kExactCheck:
3272 case TypeCheckKind::kAbstractClassCheck:
3273 case TypeCheckKind::kClassHierarchyCheck:
3274 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003275 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3276 LocationSummary::kCallOnSlowPath :
3277 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003278 break;
3279 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003280 case TypeCheckKind::kUnresolvedCheck:
3281 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003282 call_kind = LocationSummary::kCallOnSlowPath;
3283 break;
3284 }
3285
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003286 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3287 locations->SetInAt(0, Location::RequiresRegister());
3288 locations->SetInAt(1, Location::RequiresRegister());
3289 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3290 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003291 // When read barriers are enabled, we need an additional temporary
3292 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003293 if (TypeCheckNeedsATemporary(type_check_kind)) {
3294 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003295 }
3296}
3297
3298void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003299 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003300 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003301 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003302 Register obj = InputRegisterAt(instruction, 0);
3303 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003304 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003305 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3306 locations->GetTemp(1) :
3307 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003308 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003309 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3310 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3311 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3312 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003313
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003314 bool is_type_check_slow_path_fatal =
3315 (type_check_kind == TypeCheckKind::kExactCheck ||
3316 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3317 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3318 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3319 !instruction->CanThrowIntoCatchBlock();
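  // Note (a sketch of the invariant, not from the code above): a "fatal"
  // type check slow path throws and never returns to the compiled code,
  // so it can skip saving live registers; this only holds when the check
  // cannot throw into a catch block in the same method.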
3320 SlowPathCodeARM64* type_check_slow_path =
3321 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3322 is_type_check_slow_path_fatal);
3323 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003324
3325 vixl::Label done;
3326 // Avoid null check if we know obj is not null.
3327 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003328 __ Cbz(obj, &done);
3329 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003330
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003331 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003332 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003333
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003334 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003335 case TypeCheckKind::kExactCheck:
3336 case TypeCheckKind::kArrayCheck: {
3337 __ Cmp(temp, cls);
3338 // Jump to slow path for throwing the exception or doing a
3339 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003340 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003341 break;
3342 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003343
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003344 case TypeCheckKind::kAbstractClassCheck: {
3345 // If the class is abstract, we eagerly fetch the super class of the
3346 // object to avoid doing a comparison we know will fail.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003347 vixl::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003348 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003349 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003350 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003351
3352 // If the class reference currently in `temp` is not null, jump
3353 // to the `compare_classes` label to compare it with the checked
3354 // class.
3355 __ Cbnz(temp, &compare_classes);
3356 // Otherwise, jump to the slow path to throw the exception.
3357 //
3358 // Before that, move the object's class back into `temp`, as it
3359 // has been overwritten in the meantime.
3361 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003362 GenerateReferenceLoadTwoRegisters(
3363 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003364 __ B(type_check_slow_path->GetEntryLabel());
3365
3366 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003367 __ Cmp(temp, cls);
3368 __ B(ne, &loop);
3369 break;
3370 }
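      // Equivalent pseudocode for the loop above (illustration only):
      //
      //   do {
      //     temp = temp.superClass;
      //     if (temp == null) throw new ClassCastException();  // via slow path
      //   } while (temp != cls);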
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003371
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003372 case TypeCheckKind::kClassHierarchyCheck: {
3373 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003374 vixl::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003375 __ Bind(&loop);
3376 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003377 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003378
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003379 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003380 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003381
3382 // If the class reference currently in `temp` is not null, jump
3383 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003384 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003385 // Otherwise, jump to the slow path to throw the exception.
3386 //
3387 // Before that, move the object's class back into `temp`, as it
3388 // has been overwritten in the meantime.
3390 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003391 GenerateReferenceLoadTwoRegisters(
3392 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003393 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003394 break;
3395 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003396
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003397 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003398 // Do an exact check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003399 vixl::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003400 __ Cmp(temp, cls);
3401 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003402
3403 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003404 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003405 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406
3407 // If the component type is not null (i.e. the object is indeed
3408 // an array), jump to label `check_non_primitive_component_type`
3409 // to further check that this component type is not a primitive
3410 // type.
3411 __ Cbnz(temp, &check_non_primitive_component_type);
3412 // Otherwise, jump to the slow path to throw the exception.
3413 //
3414 // Before that, move the object's class back into `temp`, as it
3415 // has been overwritten in the meantime.
3417 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003418 GenerateReferenceLoadTwoRegisters(
3419 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003420 __ B(type_check_slow_path->GetEntryLabel());
3421
3422 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003423 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3424 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003425 __ Cbz(temp, &done);
3426 // Same comment as above regarding `temp` and the slow path.
3427 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003428 GenerateReferenceLoadTwoRegisters(
3429 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003430 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003431 break;
3432 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003433
Calin Juravle98893e12015-10-02 21:05:03 +01003434 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003435 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003436 // We always go into the type check slow path for the unresolved
3437 // and interface check cases.
3438 //
3439 // We cannot directly call the CheckCast runtime entry point
3440 // without resorting to a type checking slow path here (i.e. by
3441 // calling InvokeRuntime directly), as it would require assigning
3442 // fixed registers for the inputs of this HCheckCast
3443 // instruction (following the runtime calling convention), which
3444 // might be cluttered by the potential first read barrier
3445 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003446 //
3447 // TODO: Introduce a new runtime entry point taking the object
3448 // to test (instead of its class) as argument, and let it deal
3449 // with the read barrier issues. This will let us refactor this
3450 // case of the `switch` code as it was previously (with a direct
3451 // call to the runtime not using a type checking slow path).
3452 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003453 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003454 break;
3455 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003456 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003457
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003458 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003459}
3460
Alexandre Rames5319def2014-10-23 10:03:10 +01003461void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3462 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3463 locations->SetOut(Location::ConstantLocation(constant));
3464}
3465
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003466void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003467 // Will be generated at use site.
3468}
3469
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003470void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3471 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3472 locations->SetOut(Location::ConstantLocation(constant));
3473}
3474
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003475void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003476 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003477}
3478
Calin Juravle175dc732015-08-25 15:42:32 +01003479void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3480 // The trampoline uses the same calling convention as dex calling conventions,
3481 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3482 // the method_idx.
3483 HandleInvoke(invoke);
3484}
3485
3486void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3487 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3488}
3489
Alexandre Rames5319def2014-10-23 10:03:10 +01003490void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003491 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003492 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003493}
3494
Alexandre Rames67555f72014-11-18 10:55:16 +00003495void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3496 HandleInvoke(invoke);
3497}
3498
3499void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3500 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003501 LocationSummary* locations = invoke->GetLocations();
3502 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003503 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003504 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003505 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003506
3507 // The register ip1 is required to be used for the hidden argument in
3508 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003509 MacroAssembler* masm = GetVIXLAssembler();
3510 UseScratchRegisterScope scratch_scope(masm);
3511 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003512 scratch_scope.Exclude(ip1);
3513 __ Mov(ip1, invoke->GetDexMethodIndex());
3514
Alexandre Rames67555f72014-11-18 10:55:16 +00003515 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003516 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003517 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003518 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003519 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003520 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003521 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003522 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003523 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003524 // Instead of simply (possibly) unpoisoning `temp` here, we should
3525 // emit a read barrier for the previous class reference load.
3526 // However this is not required in practice, as this is an
3527 // intermediate/temporary reference and because the current
3528 // concurrent copying collector keeps the from-space memory
3529 // intact/accessible until the end of the marking phase (the
3530 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003531 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Nelli Kimbadee982016-05-13 13:08:53 +03003532 __ Ldr(temp,
3533 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3534 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity50706432016-06-14 11:31:04 -07003535 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003536 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003537 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003538 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003539 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003540 // lr();
3541 __ Blr(lr);
3542 DCHECK(!codegen_->IsLeafMethod());
3543 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3544}
3545
3546void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003547 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3548 if (intrinsic.TryDispatch(invoke)) {
3549 return;
3550 }
3551
Alexandre Rames67555f72014-11-18 10:55:16 +00003552 HandleInvoke(invoke);
3553}
3554
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003555void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003556 // Explicit clinit checks triggered by static invokes must have been pruned by
3557 // art::PrepareForRegisterAllocation.
3558 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003559
Andreas Gampe878d58c2015-01-15 23:24:00 -08003560 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3561 if (intrinsic.TryDispatch(invoke)) {
3562 return;
3563 }
3564
Alexandre Rames67555f72014-11-18 10:55:16 +00003565 HandleInvoke(invoke);
3566}
3567
Andreas Gampe878d58c2015-01-15 23:24:00 -08003568static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3569 if (invoke->GetLocations()->Intrinsified()) {
3570 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3571 intrinsic.Dispatch(invoke);
3572 return true;
3573 }
3574 return false;
3575}
3576
Vladimir Markodc151b22015-10-15 18:02:30 +01003577HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3578 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3579 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003580 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003581 return desired_dispatch_info;
3582}
3583
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003584void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003585 // For better instruction scheduling we load the direct code pointer before the method pointer.
3586 bool direct_code_loaded = false;
3587 switch (invoke->GetCodePtrLocation()) {
3588 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3589 // LR = code address from literal pool with link-time patch.
3590 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3591 direct_code_loaded = true;
3592 break;
3593 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3594 // LR = invoke->GetDirectCodePtr();
3595 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3596 direct_code_loaded = true;
3597 break;
3598 default:
3599 break;
3600 }
3601
Andreas Gampe878d58c2015-01-15 23:24:00 -08003602 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003603 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3604 switch (invoke->GetMethodLoadKind()) {
3605 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3606 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003607 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003608 break;
3609 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003610 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003611 break;
3612 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3613 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003614 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003615 break;
3616 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3617 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003618 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003619 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3620 break;
3621 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3622 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003623 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3624 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
3625 vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003626 {
3627 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003628 __ Bind(adrp_label);
3629 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003630 }
Vladimir Marko58155012015-08-19 12:49:41 +00003631 // Add LDR with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003632 vixl::Label* ldr_label =
3633 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003634 {
3635 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003636 __ Bind(ldr_label);
3637 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003638 }
Vladimir Marko58155012015-08-19 12:49:41 +00003639 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003640 }
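      // The two instructions emitted above form the usual AArch64
      // PC-relative load pair; after linking they look roughly like:
      //
      //   adrp xN, <4KiB page of the dex cache element>
      //   ldr  xN, [xN, <low 12 bits of the element address>]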
Vladimir Marko58155012015-08-19 12:49:41 +00003641 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003642 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003643 Register reg = XRegisterFrom(temp);
3644 Register method_reg;
3645 if (current_method.IsRegister()) {
3646 method_reg = XRegisterFrom(current_method);
3647 } else {
3648 DCHECK(invoke->GetLocations()->Intrinsified());
3649 DCHECK(!current_method.IsValid());
3650 method_reg = reg;
3651 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3652 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003653
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003654 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003655 __ Ldr(reg.X(),
3656 MemOperand(method_reg.X(),
3657 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003658 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003659 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3660 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003661 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3662 break;
3663 }
3664 }
3665
3666 switch (invoke->GetCodePtrLocation()) {
3667 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3668 __ Bl(&frame_entry_label_);
3669 break;
3670 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3671 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
3672 vixl::Label* label = &relative_call_patches_.back().label;
Alexandre Rames6dc01742015-11-12 14:44:19 +00003673 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
3674 __ Bind(label);
3675 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003676 break;
3677 }
3678 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3679 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3680 // LR prepared above for better instruction scheduling.
3681 DCHECK(direct_code_loaded);
3682 // lr()
3683 __ Blr(lr);
3684 break;
3685 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3686 // LR = callee_method->entry_point_from_quick_compiled_code_;
3687 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003688 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003689 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3690 // lr()
3691 __ Blr(lr);
3692 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003693 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003694
Andreas Gampe878d58c2015-01-15 23:24:00 -08003695 DCHECK(!IsLeafMethod());
3696}
3697
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003698void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003699 // Use the calling convention instead of the location of the receiver, as
3700 // intrinsics may have put the receiver in a different register. In the intrinsics
3701 // slow path, the arguments have been moved to the right place, so here we are
3702 // guaranteed that the receiver is the first register of the calling convention.
3703 InvokeDexCallingConvention calling_convention;
3704 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003705 Register temp = XRegisterFrom(temp_in);
3706 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3707 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3708 Offset class_offset = mirror::Object::ClassOffset();
3709 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3710
3711 BlockPoolsScope block_pools(GetVIXLAssembler());
3712
3713 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003714 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003715 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003716 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003717 // Instead of simply (possibly) unpoisoning `temp` here, we should
3718 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003719 // However this is not required in practice, as this is an
 // intermediate/temporary reference and because the current
3720 // concurrent copying collector keeps the from-space memory
3721 // intact/accessible until the end of the marking phase (the
3722 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003723 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3724 // temp = temp->GetMethodAt(method_offset);
3725 __ Ldr(temp, MemOperand(temp, method_offset));
3726 // lr = temp->GetEntryPoint();
3727 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3728 // lr();
3729 __ Blr(lr);
3730}
3731
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003732vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
3733 uint32_t string_index,
3734 vixl::Label* adrp_label) {
3735 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3736}
3737
3738vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
3739 uint32_t element_offset,
3740 vixl::Label* adrp_label) {
3741 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3742}
3743
3744vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
3745 uint32_t offset_or_index,
3746 vixl::Label* adrp_label,
3747 ArenaDeque<PcRelativePatchInfo>* patches) {
3748 // Add a patch entry and return the label.
3749 patches->emplace_back(dex_file, offset_or_index);
3750 PcRelativePatchInfo* info = &patches->back();
3751 vixl::Label* label = &info->label;
3752 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3753 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3754 return label;
3755}
3756
3757vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
3758 const DexFile& dex_file, uint32_t string_index) {
3759 return boot_image_string_patches_.GetOrCreate(
3760 StringReference(&dex_file, string_index),
3761 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3762}
3763
3764vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
3765 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3766 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3767 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3768}
3769
3770vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
3771 return DeduplicateUint64Literal(address);
3772}
3773
Vladimir Marko58155012015-08-19 12:49:41 +00003774void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3775 DCHECK(linker_patches->empty());
3776 size_t size =
3777 method_patches_.size() +
3778 call_patches_.size() +
3779 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003780 pc_relative_dex_cache_patches_.size() +
3781 boot_image_string_patches_.size() +
3782 pc_relative_string_patches_.size() +
3783 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003784 linker_patches->reserve(size);
3785 for (const auto& entry : method_patches_) {
3786 const MethodReference& target_method = entry.first;
3787 vixl::Literal<uint64_t>* literal = entry.second;
3788 linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
3789 target_method.dex_file,
3790 target_method.dex_method_index));
3791 }
3792 for (const auto& entry : call_patches_) {
3793 const MethodReference& target_method = entry.first;
3794 vixl::Literal<uint64_t>* literal = entry.second;
3795 linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
3796 target_method.dex_file,
3797 target_method.dex_method_index));
3798 }
3799 for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003800 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003801 info.target_method.dex_file,
3802 info.target_method.dex_method_index));
3803 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003804 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003805 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003806 &info.target_dex_file,
Alexandre Rames6dc01742015-11-12 14:44:19 +00003807 info.pc_insn_label->location(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003808 info.offset_or_index));
3809 }
3810 for (const auto& entry : boot_image_string_patches_) {
3811 const StringReference& target_string = entry.first;
3812 vixl::Literal<uint32_t>* literal = entry.second;
3813 linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
3814 target_string.dex_file,
3815 target_string.string_index));
3816 }
3817 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
3818 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
3819 &info.target_dex_file,
3820 info.pc_insn_label->location(),
3821 info.offset_or_index));
3822 }
3823 for (const auto& entry : boot_image_address_patches_) {
3824 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
3825 vixl::Literal<uint32_t>* literal = entry.second;
3826 linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003827 }
3828}
3829
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003830vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
3831 Uint32ToLiteralMap* map) {
3832 return map->GetOrCreate(
3833 value,
3834 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3835}
3836
Vladimir Marko58155012015-08-19 12:49:41 +00003837vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003838 return uint64_literals_.GetOrCreate(
3839 value,
3840 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003841}
3842
3843vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
3844 MethodReference target_method,
3845 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003846 return map->GetOrCreate(
3847 target_method,
3848 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003849}
3850
3851vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
3852 MethodReference target_method) {
3853 return DeduplicateMethodLiteral(target_method, &method_patches_);
3854}
3855
3856vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
3857 MethodReference target_method) {
3858 return DeduplicateMethodLiteral(target_method, &call_patches_);
3859}
3860
3861
Andreas Gampe878d58c2015-01-15 23:24:00 -08003862void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003863 // Explicit clinit checks triggered by static invokes must have been pruned by
3864 // art::PrepareForRegisterAllocation.
3865 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003866
Andreas Gampe878d58c2015-01-15 23:24:00 -08003867 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3868 return;
3869 }
3870
Alexandre Ramesd921d642015-04-16 15:07:16 +01003871 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003872 LocationSummary* locations = invoke->GetLocations();
3873 codegen_->GenerateStaticOrDirectCall(
3874 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003875 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003876}
3877
3878void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003879 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3880 return;
3881 }
3882
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003883 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003884 DCHECK(!codegen_->IsLeafMethod());
3885 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3886}
3887
Alexandre Rames67555f72014-11-18 10:55:16 +00003888void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003889 InvokeRuntimeCallingConvention calling_convention;
3890 CodeGenerator::CreateLoadClassLocationSummary(
3891 cls,
3892 LocationFrom(calling_convention.GetRegisterAt(0)),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003893 LocationFrom(vixl::x0),
3894 /* code_generator_supports_read_barrier */ true);
Alexandre Rames67555f72014-11-18 10:55:16 +00003895}
3896
3897void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003898 if (cls->NeedsAccessCheck()) {
3899 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3900 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3901 cls,
3902 cls->GetDexPc(),
3903 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003904 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003905 return;
3906 }
3907
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003908 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003909 Register out = OutputRegister(cls);
3910 Register current_method = InputRegisterAt(cls, 0);
3911 if (cls->IsReferrersClass()) {
Alexandre Rames67555f72014-11-18 10:55:16 +00003912 DCHECK(!cls->CanCallRuntime());
3913 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain44015862016-01-22 11:47:17 +00003914 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3915 GenerateGcRootFieldLoad(
3916 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Alexandre Rames67555f72014-11-18 10:55:16 +00003917 } else {
Vladimir Marko05792b92015-08-03 11:56:49 +01003918 MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003919 // /* GcRoot<mirror::Class>[] */ out =
3920 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
Vladimir Marko05792b92015-08-03 11:56:49 +01003921 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00003922 // /* GcRoot<mirror::Class> */ out = out[type_index]
3923 GenerateGcRootFieldLoad(
3924 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003925
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00003926 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
3927 DCHECK(cls->CanCallRuntime());
3928 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
3929 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3930 codegen_->AddSlowPath(slow_path);
3931 if (!cls->IsInDexCache()) {
3932 __ Cbz(out, slow_path->GetEntryLabel());
3933 }
3934 if (cls->MustGenerateClinitCheck()) {
3935 GenerateClassInitializationCheck(slow_path, out);
3936 } else {
3937 __ Bind(slow_path->GetExitLabel());
3938 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003939 }
3940 }
3941}
3942
David Brazdilcb1c0552015-08-04 16:22:25 +01003943static MemOperand GetExceptionTlsAddress() {
3944 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
3945}
3946
Alexandre Rames67555f72014-11-18 10:55:16 +00003947void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
3948 LocationSummary* locations =
3949 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3950 locations->SetOut(Location::RequiresRegister());
3951}
3952
3953void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01003954 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
3955}
3956
3957void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
3958 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
3959}
3960
3961void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
3962 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00003963}
3964
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003965HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
3966 HLoadString::LoadKind desired_string_load_kind) {
3967 if (kEmitCompilerReadBarrier) {
3968 switch (desired_string_load_kind) {
3969 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3970 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3971 case HLoadString::LoadKind::kBootImageAddress:
3972 // TODO: Implement for read barrier.
3973 return HLoadString::LoadKind::kDexCacheViaMethod;
3974 default:
3975 break;
3976 }
3977 }
3978 switch (desired_string_load_kind) {
3979 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3980 DCHECK(!GetCompilerOptions().GetCompilePic());
3981 break;
3982 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3983 DCHECK(GetCompilerOptions().GetCompilePic());
3984 break;
3985 case HLoadString::LoadKind::kBootImageAddress:
3986 break;
3987 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01003988 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003989 break;
3990 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01003991 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003992 break;
3993 case HLoadString::LoadKind::kDexCacheViaMethod:
3994 break;
3995 }
3996 return desired_string_load_kind;
3997}
3998
Alexandre Rames67555f72014-11-18 10:55:16 +00003999void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004000 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004001 ? LocationSummary::kCallOnSlowPath
4002 : LocationSummary::kNoCall;
4003 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004004 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4005 locations->SetInAt(0, Location::RequiresRegister());
4006 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004007 locations->SetOut(Location::RequiresRegister());
4008}
4009
4010void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004011 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004012 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004013
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004014 switch (load->GetLoadKind()) {
4015 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4016 DCHECK(!kEmitCompilerReadBarrier);
4017 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4018 load->GetStringIndex()));
4019 return; // No dex cache slow path.
4020 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4021 DCHECK(!kEmitCompilerReadBarrier);
4022 // Add ADRP with its PC-relative String patch.
4023 const DexFile& dex_file = load->GetDexFile();
4024 uint32_t string_index = load->GetStringIndex();
4025 vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
4026 {
4027 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4028 __ Bind(adrp_label);
4029 __ adrp(out.X(), /* offset placeholder */ 0);
4030 }
4031 // Add ADD with its PC-relative String patch.
4032 vixl::Label* add_label =
4033 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4034 {
4035 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4036 __ Bind(add_label);
4037 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4038 }
4039 return; // No dex cache slow path.
4040 }
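    // After linking, the ADRP/ADD pair above resolves to roughly:
    //
    //   adrp xN, <4KiB page of the boot image String>
    //   add  xN, xN, <low 12 bits of the String address>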
4041 case HLoadString::LoadKind::kBootImageAddress: {
4042 DCHECK(!kEmitCompilerReadBarrier);
4043 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4044 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4045 return; // No dex cache slow path.
4046 }
4047 case HLoadString::LoadKind::kDexCacheAddress: {
4048 DCHECK_NE(load->GetAddress(), 0u);
4049 // The LDR immediate encodes a 12-bit offset scaled by the access size, so for
4050 // 32-bit loads that gives a 16KiB range. To reduce the number of literals when
4051 // we load multiple strings, split the dex cache address into a 16KiB-aligned
4052 // base loaded from a literal and the remaining offset embedded in the load.
4053 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4054 DCHECK_ALIGNED(load->GetAddress(), 4u);
4055 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4056 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4057 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
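      // For example, with an illustrative (made-up) address:
      //   GetAddress()   = 0x12345678
      //   base_address   = 0x12345678 & ~0x3fff = 0x12344000  (from a literal)
      //   offset         = 0x12345678 &  0x3fff = 0x1678      (in the LDR)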
4058 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4059 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4060 break;
4061 }
4062 case HLoadString::LoadKind::kDexCachePcRelative: {
4063 // Add ADRP with its PC-relative DexCache access patch.
4064 const DexFile& dex_file = load->GetDexFile();
4065 uint32_t element_offset = load->GetDexCacheElementOffset();
4066 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4067 {
4068 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4069 __ Bind(adrp_label);
4070 __ adrp(out.X(), /* offset placeholder */ 0);
4071 }
4072 // Add LDR with its PC-relative DexCache access patch.
4073 vixl::Label* ldr_label =
4074 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4075 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4076 break;
4077 }
4078 case HLoadString::LoadKind::kDexCacheViaMethod: {
4079 Register current_method = InputRegisterAt(load, 0);
4080 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4081 GenerateGcRootFieldLoad(
4082 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4083 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4084 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4085 // /* GcRoot<mirror::String> */ out = out[string_index]
4086 GenerateGcRootFieldLoad(
4087 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4088 break;
4089 }
4090 default:
4091 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4092 UNREACHABLE();
4093 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004094
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004095 if (!load->IsInDexCache()) {
4096 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4097 codegen_->AddSlowPath(slow_path);
4098 __ Cbz(out, slow_path->GetEntryLabel());
4099 __ Bind(slow_path->GetExitLabel());
4100 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004101}
4102
Alexandre Rames5319def2014-10-23 10:03:10 +01004103void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4104 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4105 locations->SetOut(Location::ConstantLocation(constant));
4106}
4107
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004108void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004109 // Will be generated at use site.
4110}
4111
Alexandre Rames67555f72014-11-18 10:55:16 +00004112void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4113 LocationSummary* locations =
4114 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4115 InvokeRuntimeCallingConvention calling_convention;
4116 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4117}
4118
4119void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4120 codegen_->InvokeRuntime(instruction->IsEnter()
4121 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4122 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004123 instruction->GetDexPc(),
4124 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004125 if (instruction->IsEnter()) {
4126 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4127 } else {
4128 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4129 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004130}
4131
Alexandre Rames42d641b2014-10-27 14:00:51 +00004132void LocationsBuilderARM64::VisitMul(HMul* mul) {
4133 LocationSummary* locations =
4134 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4135 switch (mul->GetResultType()) {
4136 case Primitive::kPrimInt:
4137 case Primitive::kPrimLong:
4138 locations->SetInAt(0, Location::RequiresRegister());
4139 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004140 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004141 break;
4142
4143 case Primitive::kPrimFloat:
4144 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004145 locations->SetInAt(0, Location::RequiresFpuRegister());
4146 locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}
4234
Alexandre Rames5319def2014-10-23 10:03:10 +01004235void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4236 LocationSummary* locations =
4237 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4238 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004239 if (instruction->IsStringAlloc()) {
4240 locations->AddTemp(LocationFrom(kArtMethodRegister));
4241 } else {
4242 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4243 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4244 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004245 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4246}
4247
4248void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004249 // Note: if heap poisoning is enabled, the entry point takes cares
4250 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004251 if (instruction->IsStringAlloc()) {
4252 // String is allocated through StringFactory. Call NewEmptyString entry point.
4253 Location temp = instruction->GetLocations()->GetTemp(0);
4254 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
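    // The thread-local pNewEmptyString slot holds the StringFactory method;
    // load that ArtMethod* into the method register, then call through its
    // quick compiled code entry point.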
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
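  // Booleans are materialized as 0 or 1, so XOR-ing the input with 1 flips
  // the value.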
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
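  // Loading into wzr from the object's address faults if the reference is
  // null; the runtime's fault handler uses the PC info recorded below to turn
  // the fault into a NullPointerException, so no compare/branch is emitted.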
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

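  // A single compare-and-branch-on-zero routes null references to the slow
  // path, which throws the NullPointerException.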
  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
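      // AArch64 has no floating-point remainder instruction, so the remainder
      // is computed by calling the runtime's fmodf/fmod entry points.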
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

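// Unresolved field accesses cannot be compiled down to direct loads and
// stores. The visitors below only set up the field-access calling convention
// and delegate to the shared CodeGenerator helpers, which call into runtime
// entry points.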
void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
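    // Integral-to-integral conversions are handled entirely in registers:
    // long->int is a plain W-register move, conversions to char zero-extend
    // with Ubfx, and the remaining narrowing/widening cases sign-extend from
    // the smaller width with Sbfx.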
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Assume at most an average of 16 instructions generated per HIR instruction in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
  // ADR has a limited range (+/-1MB), so we set a threshold for the number of HIRs in the graph
  // to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
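    // temp tracks (value - case_value[last_index]): the Subs below sets flags
    // against the first case, and each loop iteration re-biases temp by 2 so a
    // single Subs serves two case comparisons. For three entries the emitted
    // sequence is roughly (a sketch; labels stand for the successor blocks):
    //   subs w_temp, w_value, #lower_bound
    //   b.eq case_0
    //   subs w_temp, w_temp, #2
    //   b.lo case_1
    //   b.eq case_2
    //   b   default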
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two
    // blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // The current VIXL implementation does not require any blocked registers to encode the
    // immediate value for Adr, so we are free to use both VIXL blocked registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
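    // Each table entry is a 32-bit offset from the table start to the target
    // block, hence the `UXTW, 2` scaled index above and the sign-extending
    // add below.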

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

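// The two reference-load helpers below differ only in how the base address
// is supplied: the "one register" variant reuses `out` as the base, so its
// non-Baker read barrier path must first save the base into `maybe_temp`,
// while the "two registers" variant keeps the base in a separate `obj`
// register that stays live across the load.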
void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

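// When `fixup_label` is non-null it is bound immediately before the single
// load/add instruction (the SingleEmissionCheckScope below enforces that
// exactly one instruction follows the bind), presumably so that instruction
// can be patched at link time, e.g. for PC-relative dex cache or boot image
// references; the label's exact consumers live outside this file.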
void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            vixl::Register obj,
                                                            uint32_t offset,
                                                            vixl::Label* fixup_label) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      if (fixup_label == nullptr) {
        __ Ldr(root_reg, MemOperand(obj, offset));
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ ldr(root_reg, MemOperand(obj, offset));
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ add(root_reg.X(), obj.X(), offset);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(fixup_label);
      __ ldr(root_reg, MemOperand(obj, offset));
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0U;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   vixl::Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");
  // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
  __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
  __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
  static_assert(
      LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
      "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");

  // Introduce a dependency on the high bits of rb_state, which shall
  // be all zeroes, to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
  Register temp2 = temps.AcquireW();
  __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
  // obj is unchanged by this operation, but its value now depends on
  // temp2, which depends on temp.
  __ Add(obj, obj, Operand(temp2));
  temps.Release(temp2);

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        temp2 = temps.AcquireW();
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  __ Cmp(temp, ReadBarrier::gray_ptr_);
  __ B(eq, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

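// Reads a method pointer out of a class. Vtable entries are embedded in the
// Class object at a statically known offset, so a single load suffices; IMT
// entries live in a separate ImTable, so the ImTable pointer is loaded first
// and then indexed.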
void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    // Index off the ImTable pointer loaded just above, not off the class.
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art