/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_

#include "base/bit_field.h"
#include "base/macros.h"
#include "code_generator.h"
#include "common_arm64.h"
#include "dex/dex_file_types.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/arm64/assembler_arm64.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art HIDDEN {

namespace linker {
class Arm64RelativePatcherTest;
}  // namespace linker

namespace arm64 {

class CodeGeneratorARM64;

// Use a local definition to prevent copying mistakes.
static constexpr size_t kArm64WordSize = static_cast<size_t>(kArm64PointerSize);

// These constants are used as an approximate margin when emission of veneer and literal pools
// must be blocked.
static constexpr int kMaxMacroInstructionSizeInBytes = 15 * vixl::aarch64::kInstructionSize;
static constexpr int kInvokeCodeMarginSizeInBytes = 6 * kMaxMacroInstructionSizeInBytes;
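
// For illustration, a sketch of how such a margin might be used (an assumed
// call pattern; the actual uses are in code_generator_arm64.cc):
//
//   {
//     vixl::EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
//     // ... emit the invoke sequence; VIXL will not emit veneer/literal pools here.
//   }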

static const vixl::aarch64::Register kParameterCoreRegisters[] = {
  vixl::aarch64::x1,
  vixl::aarch64::x2,
  vixl::aarch64::x3,
  vixl::aarch64::x4,
  vixl::aarch64::x5,
  vixl::aarch64::x6,
  vixl::aarch64::x7
};
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
static const vixl::aarch64::FPRegister kParameterFPRegisters[] = {
  vixl::aarch64::d0,
  vixl::aarch64::d1,
  vixl::aarch64::d2,
  vixl::aarch64::d3,
  vixl::aarch64::d4,
  vixl::aarch64::d5,
  vixl::aarch64::d6,
  vixl::aarch64::d7
};
static constexpr size_t kParameterFPRegistersLength = arraysize(kParameterFPRegisters);

// Thread Register.
const vixl::aarch64::Register tr = vixl::aarch64::x19;
// Marking Register.
const vixl::aarch64::Register mr = vixl::aarch64::x20;
// Method register on invoke.
static const vixl::aarch64::Register kArtMethodRegister = vixl::aarch64::x0;
const vixl::aarch64::CPURegList vixl_reserved_core_registers(vixl::aarch64::ip0,
                                                             vixl::aarch64::ip1);
const vixl::aarch64::CPURegList vixl_reserved_fp_registers(vixl::aarch64::d31);

const vixl::aarch64::CPURegList runtime_reserved_core_registers =
    vixl::aarch64::CPURegList(
        tr,
        // Reserve X20 as Marking Register when emitting Baker read barriers.
        ((kEmitCompilerReadBarrier && kUseBakerReadBarrier) ? mr : vixl::aarch64::NoCPUReg),
        vixl::aarch64::lr);

// Some instructions have special requirements for a temporary. For example,
// LoadClass/kBssEntry and LoadString/kBssEntry for Baker read barriers require
// a temp that is not x0 (to avoid an extra move), and Baker read barrier field
// loads with large offsets need a fixed register to limit the number of link-time
// thunks we generate. For these and similar cases, we want to reserve a specific
// register that is neither callee-saved nor an argument register. We choose x15.
inline Location FixedTempLocation() {
  return Location::RegisterLocation(vixl::aarch64::x15.GetCode());
}
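
// A hypothetical use in a locations pass, for illustration only:
//
//   locations->AddTemp(FixedTempLocation());  // Reserve x15 as the fixed temp.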

// AAPCS64 callee-saved registers, excluding x19 (Thread Register) and,
// when emitting Baker read barriers, x20 (Marking Register).
const vixl::aarch64::CPURegList callee_saved_core_registers(
    vixl::aarch64::CPURegister::kRegister,
    vixl::aarch64::kXRegSize,
    ((kEmitCompilerReadBarrier && kUseBakerReadBarrier)
         ? vixl::aarch64::x21.GetCode()
         : vixl::aarch64::x20.GetCode()),
    vixl::aarch64::x30.GetCode());
const vixl::aarch64::CPURegList callee_saved_fp_registers(vixl::aarch64::CPURegister::kFPRegister,
                                                          vixl::aarch64::kDRegSize,
                                                          vixl::aarch64::d8.GetCode(),
                                                          vixl::aarch64::d15.GetCode());
Location ARM64ReturnLocation(DataType::Type return_type);

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  explicit SlowPathCodeARM64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch64::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch64::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) override;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) override;

 private:
  vixl::aarch64::Label entry_label_;
  vixl::aarch64::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};
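
// For illustration, a concrete slow path derived from SlowPathCodeARM64 could
// look roughly as follows (a hypothetical sketch; the real slow paths live in
// code_generator_arm64.cc):
//
//   class ExampleSlowPathARM64 : public SlowPathCodeARM64 {
//    public:
//     explicit ExampleSlowPathARM64(HInstruction* at) : SlowPathCodeARM64(at) {}
//     void EmitNativeCode(CodeGenerator* codegen) override {
//       __ Bind(GetEntryLabel());
//       // ... save live registers, call the runtime, restore live registers ...
//       __ B(GetExitLabel());  // Resume normal execution.
//     }
//     const char* GetDescription() const override { return "ExampleSlowPathARM64"; }
//   };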

class JumpTableARM64 : public DeletableArenaObject<kArenaAllocSwitchTable> {
 public:
  explicit JumpTableARM64(HPackedSwitch* switch_instr)
      : switch_instr_(switch_instr), table_start_() {}

  vixl::aarch64::Label* GetTableStartLabel() { return &table_start_; }

  void EmitTable(CodeGeneratorARM64* codegen);

 private:
  HPackedSwitch* const switch_instr_;
  vixl::aarch64::Label table_start_;

  DISALLOW_COPY_AND_ASSIGN(JumpTableARM64);
};

static const vixl::aarch64::Register kRuntimeParameterCoreRegisters[] =
    { vixl::aarch64::x0,
      vixl::aarch64::x1,
      vixl::aarch64::x2,
      vixl::aarch64::x3,
      vixl::aarch64::x4,
      vixl::aarch64::x5,
      vixl::aarch64::x6,
      vixl::aarch64::x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const vixl::aarch64::FPRegister kRuntimeParameterFpuRegisters[] =
    { vixl::aarch64::d0,
      vixl::aarch64::d1,
      vixl::aarch64::d2,
      vixl::aarch64::d3,
      vixl::aarch64::d4,
      vixl::aarch64::d5,
      vixl::aarch64::d6,
      vixl::aarch64::d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<vixl::aarch64::Register,
                                                                vixl::aarch64::FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength,
                          kArm64PointerSize) {}

  Location GetReturnLocation(DataType::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

class InvokeDexCallingConvention : public CallingConvention<vixl::aarch64::Register,
                                                            vixl::aarch64::FPRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFPRegisters,
                          kParameterFPRegistersLength,
                          kArm64PointerSize) {}

  Location GetReturnLocation(DataType::Type return_type) const {
    return ARM64ReturnLocation(return_type);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

class InvokeDexCallingConventionVisitorARM64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorARM64() {}
  virtual ~InvokeDexCallingConventionVisitorARM64() {}

  Location GetNextLocation(DataType::Type type) override;
  Location GetReturnLocation(DataType::Type return_type) const override {
    return calling_convention.GetReturnLocation(return_type);
  }
  Location GetMethodLocation() const override;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARM64);
};

class FieldAccessCallingConventionARM64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionARM64() {}

  Location GetObjectLocation() const override {
    return helpers::LocationFrom(vixl::aarch64::x1);
  }
  Location GetFieldIndexLocation() const override {
    return helpers::LocationFrom(vixl::aarch64::x0);
  }
  Location GetReturnLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
    return helpers::LocationFrom(vixl::aarch64::x0);
  }
  Location GetSetValueLocation(DataType::Type type ATTRIBUTE_UNUSED,
                               bool is_instance) const override {
    return is_instance
        ? helpers::LocationFrom(vixl::aarch64::x2)
        : helpers::LocationFrom(vixl::aarch64::x1);
  }
  Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
    return helpers::LocationFrom(vixl::aarch64::d0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionARM64);
};

class InstructionCodeGeneratorARM64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) override;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) override {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Arm64Assembler* GetAssembler() const { return assembler_; }
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

 private:
  void GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                        vixl::aarch64::Register class_reg);
  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                         vixl::aarch64::Register temp);
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void HandleBinaryOp(HBinaryOperation* instr);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleCondition(HCondition* instruction);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);

  // Generate a floating-point comparison.
  void GenerateFcmp(HInstruction* instruction);

  void HandleShift(HBinaryOperation* instr);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch64::Label* true_target,
                             vixl::aarch64::Label* false_target);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateIntDiv(HDiv* instruction);
  void GenerateIntDivForConstDenom(HDiv* instruction);
  void GenerateIntDivForPower2Denom(HDiv* instruction);
  void GenerateIntRem(HRem* instruction);
  void GenerateIntRemForConstDenom(HRem* instruction);
  void GenerateIntRemForPower2Denom(HRem* instruction);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);

  vixl::aarch64::MemOperand VecAddress(
      HVecMemoryOperation* instruction,
      // This function may acquire a scratch register.
      vixl::aarch64::UseScratchRegisterScope* temps_scope,
      size_t size,
      bool is_string_char_at,
      /*out*/ vixl::aarch64::Register* scratch);

  Arm64Assembler* const assembler_;
  CodeGeneratorARM64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARM64);
};

class LocationsBuilderARM64 : public HGraphVisitor {
 public:
  LocationsBuilderARM64(HGraph* graph, CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) override;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) override {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleBinaryOp(HBinaryOperation* instr);
  void HandleFieldSet(HInstruction* instruction);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleInvoke(HInvoke* instr);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* instr);

  CodeGeneratorARM64* const codegen_;
  InvokeDexCallingConventionVisitorARM64 parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARM64);
};

class ParallelMoveResolverARM64 : public ParallelMoveResolverNoSwap {
 public:
  ParallelMoveResolverARM64(ArenaAllocator* allocator, CodeGeneratorARM64* codegen)
      : ParallelMoveResolverNoSwap(allocator), codegen_(codegen), vixl_temps_() {}

 protected:
  void PrepareForEmitNativeCode() override;
  void FinishEmitNativeCode() override;
  Location AllocateScratchLocationFor(Location::Kind kind) override;
  void FreeScratchLocation(Location loc) override;
  void EmitMove(size_t index) override;

 private:
  Arm64Assembler* GetAssembler() const;
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() const {
    return GetAssembler()->GetVIXLAssembler();
  }

  CodeGeneratorARM64* const codegen_;
  vixl::aarch64::UseScratchRegisterScope vixl_temps_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARM64);
};

class CodeGeneratorARM64 : public CodeGenerator {
 public:
  CodeGeneratorARM64(HGraph* graph,
                     const CompilerOptions& compiler_options,
                     OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorARM64() {}

  void GenerateFrameEntry() override;
  void GenerateFrameExit() override;

  vixl::aarch64::CPURegList GetFramePreservedCoreRegisters() const;
  vixl::aarch64::CPURegList GetFramePreservedFPRegisters() const;

  void Bind(HBasicBlock* block) override;

  vixl::aarch64::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  size_t GetWordSize() const override {
    return kArm64WordSize;
  }

  size_t GetSlowPathFPWidth() const override {
    return GetGraph()->HasSIMD()
        ? vixl::aarch64::kQRegSizeInBytes
        : vixl::aarch64::kDRegSizeInBytes;
  }

  size_t GetCalleePreservedFPWidth() const override {
    return vixl::aarch64::kDRegSizeInBytes;
  }

  uintptr_t GetAddressOf(HBasicBlock* block) override {
    vixl::aarch64::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  HGraphVisitor* GetLocationBuilder() override { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() override { return &instruction_visitor_; }
  Arm64Assembler* GetAssembler() override { return &assembler_; }
  const Arm64Assembler& GetAssembler() const override { return assembler_; }
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  // Emit a write barrier.
  void MarkGCCard(vixl::aarch64::Register object,
                  vixl::aarch64::Register value,
                  bool value_can_be_null);

  void GenerateMemoryBarrier(MemBarrierKind kind);

  // Register allocation.

  void SetupBlockedRegisters() const override;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) override;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) override;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) override;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) override;

  // The number of registers that can be allocated. The register allocator may
  // decide to reserve and not use a few of them.
  // We do not consider registers sp, xzr, wzr. They are either not allocatable
  // (xzr, wzr), or make for poor allocatable registers (sp alignment
  // requirements, etc.). This also facilitates our task as all other registers
  // can easily be mapped to or from their type and index or code.
  static const int kNumberOfAllocatableRegisters = vixl::aarch64::kNumberOfRegisters - 1;
  static const int kNumberOfAllocatableFPRegisters = vixl::aarch64::kNumberOfFPRegisters;
  static constexpr int kNumberOfAllocatableRegisterPairs = 0;

  void DumpCoreRegister(std::ostream& stream, int reg) const override;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const override;

  InstructionSet GetInstructionSet() const override {
    return InstructionSet::kArm64;
  }

  const Arm64InstructionSetFeatures& GetInstructionSetFeatures() const;

  void Initialize() override {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  // We want to use the STP and LDP instructions to spill and restore registers for slow paths.
  // These instructions can only encode offsets that are multiples of the register size accessed.
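  // (For example, "stp x0, x1, [sp, #imm]" requires `imm` to be a multiple of 8,
  // so 8-byte slot alignment keeps slow path spills encodable in a single STP/LDP.)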
  uint32_t GetPreferredSlotsAlignment() const override { return vixl::aarch64::kXRegSizeInBytes; }

  JumpTableARM64* CreateJumpTable(HPackedSwitch* switch_instr) {
    jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARM64(switch_instr));
    return jump_tables_.back().get();
  }

  void Finalize(CodeAllocator* allocator) override;

  // Code generation helpers.
  void MoveConstant(vixl::aarch64::CPURegister destination, HConstant* constant);
  void MoveConstant(Location destination, int32_t value) override;
  void MoveLocation(Location dst, Location src, DataType::Type dst_type) override;
  void AddLocationAsTemp(Location location, LocationSummary* locations) override;

  void Load(DataType::Type type,
            vixl::aarch64::CPURegister dst,
            const vixl::aarch64::MemOperand& src);
  void Store(DataType::Type type,
             vixl::aarch64::CPURegister src,
             const vixl::aarch64::MemOperand& dst);
  void LoadAcquire(HInstruction* instruction,
                   vixl::aarch64::CPURegister dst,
                   const vixl::aarch64::MemOperand& src,
                   bool needs_null_check);
  void StoreRelease(HInstruction* instruction,
                    DataType::Type type,
                    vixl::aarch64::CPURegister src,
                    const vixl::aarch64::MemOperand& dst,
                    bool needs_null_check);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) override;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  ParallelMoveResolverARM64* GetMoveResolver() override { return &move_resolver_; }

  bool NeedsTwoRegisters(DataType::Type type ATTRIBUTE_UNUSED) const override {
    return false;
  }

  // Check if the desired_string_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) override;

  // Check if the desired_class_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) override;

  // Check if the desired_dispatch_info is supported. If it is, return it;
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      ArtMethod* method) override;

  void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
  void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              DataType::Type type ATTRIBUTE_UNUSED) override {
    UNIMPLEMENTED(FATAL);
  }

  // Add a new boot image intrinsic patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBootImageIntrinsicPatch(uint32_t intrinsic_data,
                                                   vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new boot image relocation patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBootImageRelRoPatch(uint32_t boot_image_offset,
                                               vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new boot image method patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBootImageMethodPatch(MethodReference target_method,
                                                vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new .bss entry method patch for an instruction and return
  // the label to be bound before the instruction. The instruction will be
  // either the ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label`
  // pointing to the associated ADRP patch label).
  vixl::aarch64::Label* NewMethodBssEntryPatch(MethodReference target_method,
                                               vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new boot image type patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBootImageTypePatch(const DexFile& dex_file,
                                              dex::TypeIndex type_index,
                                              vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new .bss entry type patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBssEntryTypePatch(const DexFile& dex_file,
                                             dex::TypeIndex type_index,
                                             vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new boot image string patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewBootImageStringPatch(const DexFile& dex_file,
                                                dex::StringIndex string_index,
                                                vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new .bss entry string patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewStringBssEntryPatch(const DexFile& dex_file,
                                               dex::StringIndex string_index,
                                               vixl::aarch64::Label* adrp_label = nullptr);

  // Emit the BL instruction for an entrypoint thunk call and record the associated patch for AOT.
  void EmitEntrypointThunkCall(ThreadOffset64 entrypoint_offset);

  // Emit the CBNZ instruction for a Baker read barrier and record
  // the associated patch for AOT, or the slow path for JIT.
  void EmitBakerReadBarrierCbnz(uint32_t custom_data);

  vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
  vixl::aarch64::Literal<uint32_t>* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                                dex::StringIndex string_index,
                                                                Handle<mirror::String> handle);
  vixl::aarch64::Literal<uint32_t>* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                               dex::TypeIndex type_index,
                                                               Handle<mirror::Class> handle);

  void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
  void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
                          vixl::aarch64::Register out,
                          vixl::aarch64::Register base);
  void EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
                                vixl::aarch64::Register out,
                                vixl::aarch64::Register base);
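
  // A typical PC-relative sequence, sketched with the helpers above (assumed
  // usage; the real sequences are emitted in code_generator_arm64.cc):
  //
  //   vixl::aarch64::Label* adrp_label = NewBootImageStringPatch(dex_file, string_index);
  //   EmitAdrpPlaceholder(adrp_label, temp);  // ADRP temp, <string address page>
  //   vixl::aarch64::Label* add_label =
  //       NewBootImageStringPatch(dex_file, string_index, adrp_label);
  //   EmitAddPlaceholder(add_label, temp, temp);  // ADD temp, temp, <page offset>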

  void LoadBootImageAddress(vixl::aarch64::Register reg, uint32_t boot_image_reference);
  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);

  void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) override;
  bool NeedsThunkCode(const linker::LinkerPatch& patch) const override;
  void EmitThunkCode(const linker::LinkerPatch& patch,
                     /*out*/ ArenaVector<uint8_t>* code,
                     /*out*/ std::string* debug_name) override;

  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) override;

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers based on read_barrier_option.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch64::Register obj,
                               uint32_t offset,
                               vixl::aarch64::Label* fixup_label,
                               ReadBarrierOption read_barrier_option);
  // Generate MOV for the `old_value` in UnsafeCASObject and mark it with a Baker read barrier.
  void GenerateUnsafeCasOldValueMovWithBakerReadBarrier(vixl::aarch64::Register marked,
                                                        vixl::aarch64::Register old_value);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  // Overload suitable for Unsafe.getObject/-Volatile() intrinsics.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch64::Register obj,
                                             const vixl::aarch64::MemOperand& src,
                                             bool needs_null_check,
                                             bool use_load_acquire);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch64::Register obj,
                                             uint32_t offset,
                                             Location maybe_temp,
                                             bool needs_null_check,
                                             bool use_load_acquire);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HArrayGet* instruction,
                                             Location ref,
                                             vixl::aarch64::Register obj,
                                             uint32_t data_offset,
                                             Location index,
                                             bool needs_null_check);

  // Emit code checking the status of the Marking Register, and
  // aborting the program if MR does not match the value stored in the
  // art::Thread object. Code is only emitted in debug mode and if
  // CompilerOptions::EmitRunTimeChecksInDebugMode returns true.
  //
  // Argument `code` is used to identify the different occurrences of
  // MaybeGenerateMarkingRegisterCheck in the code generator, and is
  // passed to the BRK instruction.
  //
  // If `temp_loc` is a valid location, it is expected to be a
  // register and will be used as a temporary to generate code;
  // otherwise, a temporary will be fetched from the core register
  // scratch pool.
  virtual void MaybeGenerateMarkingRegisterCheck(int code,
                                                 Location temp_loc = Location::NoLocation());

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void GenerateNop() override;

  void GenerateImplicitNullCheck(HNullCheck* instruction) override;
  void GenerateExplicitNullCheck(HNullCheck* instruction) override;

  void MaybeRecordImplicitNullCheck(HInstruction* instr) final {
    // The function must only be called within special scopes
    // (EmissionCheckScope, ExactAssemblyScope) which prevent generation of
    // veneer/literal pools by the VIXL assembler.
    CHECK_EQ(GetVIXLAssembler()->ArePoolsBlocked(), true)
        << "The function must only be called within EmissionCheckScope or ExactAssemblyScope";
    CodeGenerator::MaybeRecordImplicitNullCheck(instr);
  }
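
  // The expected call pattern, sketched from the comment above (an assumption;
  // the real call sites are in code_generator_arm64.cc and the intrinsics):
  //
  //   {
  //     ExactAssemblyScope scope(GetVIXLAssembler(),
  //                              vixl::aarch64::kInstructionSize,
  //                              CodeBufferCheckScope::kExactSize);
  //     __ ldr(dst, MemOperand(base, offset));  // The instruction that may fault.
  //     MaybeRecordImplicitNullCheck(instruction);
  //   }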

 private:
  // Encoding of thunk type and data for link-time generated thunks for Baker read barriers.

  enum class BakerReadBarrierKind : uint8_t {
    kField,    // Field get or array get with constant offset (i.e. constant index).
    kAcquire,  // Volatile field get.
    kArray,    // Array get with index in register.
    kGcRoot,   // GC root load.
    kLast = kGcRoot
  };

  static constexpr uint32_t kBakerReadBarrierInvalidEncodedReg = /* sp/zr is invalid */ 31u;

  static constexpr size_t kBitsForBakerReadBarrierKind =
      MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierKind::kLast));
  static constexpr size_t kBakerReadBarrierBitsForRegister =
      MinimumBitsToStore(kBakerReadBarrierInvalidEncodedReg);
  using BakerReadBarrierKindField =
      BitField<BakerReadBarrierKind, 0, kBitsForBakerReadBarrierKind>;
  using BakerReadBarrierFirstRegField =
      BitField<uint32_t, kBitsForBakerReadBarrierKind, kBakerReadBarrierBitsForRegister>;
  using BakerReadBarrierSecondRegField =
      BitField<uint32_t,
               kBitsForBakerReadBarrierKind + kBakerReadBarrierBitsForRegister,
               kBakerReadBarrierBitsForRegister>;
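
  // With the kinds and registers above, the custom data packs as
  // (2 bits for the kind, 5 bits per register code):
  //
  //   bits [0, 2)  : BakerReadBarrierKind
  //   bits [2, 7)  : first register (base or GC root register)
  //   bits [7, 12) : second register (holder), or kBakerReadBarrierInvalidEncodedReg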

  static void CheckValidReg(uint32_t reg) {
    DCHECK(reg < vixl::aarch64::lr.GetCode() &&
           reg != vixl::aarch64::ip0.GetCode() &&
           reg != vixl::aarch64::ip1.GetCode()) << reg;
  }

  static inline uint32_t EncodeBakerReadBarrierFieldData(uint32_t base_reg, uint32_t holder_reg) {
    CheckValidReg(base_reg);
    CheckValidReg(holder_reg);
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kField) |
           BakerReadBarrierFirstRegField::Encode(base_reg) |
           BakerReadBarrierSecondRegField::Encode(holder_reg);
  }

  static inline uint32_t EncodeBakerReadBarrierAcquireData(uint32_t base_reg, uint32_t holder_reg) {
    CheckValidReg(base_reg);
    CheckValidReg(holder_reg);
    DCHECK_NE(base_reg, holder_reg);
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kAcquire) |
           BakerReadBarrierFirstRegField::Encode(base_reg) |
           BakerReadBarrierSecondRegField::Encode(holder_reg);
  }

  static inline uint32_t EncodeBakerReadBarrierArrayData(uint32_t base_reg) {
    CheckValidReg(base_reg);
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kArray) |
           BakerReadBarrierFirstRegField::Encode(base_reg) |
           BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg);
  }

  static inline uint32_t EncodeBakerReadBarrierGcRootData(uint32_t root_reg) {
    CheckValidReg(root_reg);
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kGcRoot) |
           BakerReadBarrierFirstRegField::Encode(root_reg) |
           BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg);
  }
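
  // For example, with the layout above, EncodeBakerReadBarrierFieldData(
  //     /* base_reg= */ 1, /* holder_reg= */ 2)
  // yields kField | (1 << 2) | (2 << 7) == 0x104.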

  void CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                    uint32_t encoded_data,
                                    /*out*/ std::string* debug_name);

  using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch64::Literal<uint32_t>*>;
  using StringToLiteralMap = ArenaSafeMap<StringReference,
                                          vixl::aarch64::Literal<uint32_t>*,
                                          StringReferenceValueComparator>;
  using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                        vixl::aarch64::Literal<uint32_t>*,
                                        TypeReferenceValueComparator>;

  vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value);
  vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);

  // The PcRelativePatchInfo is used for PC-relative addressing of methods/strings/types,
  // whether through .data.bimg.rel.ro, .bss, or directly in the boot image.
  struct PcRelativePatchInfo : PatchInfo<vixl::aarch64::Label> {
    PcRelativePatchInfo(const DexFile* dex_file, uint32_t off_or_idx)
        : PatchInfo<vixl::aarch64::Label>(dex_file, off_or_idx), pc_insn_label() { }

    vixl::aarch64::Label* pc_insn_label;
  };

  struct BakerReadBarrierPatchInfo {
    explicit BakerReadBarrierPatchInfo(uint32_t data) : label(), custom_data(data) { }

    vixl::aarch64::Label label;
    uint32_t custom_data;
  };

  vixl::aarch64::Label* NewPcRelativePatch(const DexFile* dex_file,
                                           uint32_t offset_or_index,
                                           vixl::aarch64::Label* adrp_label,
                                           ArenaDeque<PcRelativePatchInfo>* patches);

  void EmitJumpTables();

  template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                          ArenaVector<linker::LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  // We use a deque so that the `vixl::aarch64::Label` objects do not move in memory.
  ArenaDeque<vixl::aarch64::Label> block_labels_;  // Indexed by block id.
  vixl::aarch64::Label frame_entry_label_;
  ArenaVector<std::unique_ptr<JumpTableARM64>> jump_tables_;

  LocationsBuilderARM64 location_builder_;
  InstructionCodeGeneratorARM64 instruction_visitor_;
  ParallelMoveResolverARM64 move_resolver_;
  Arm64Assembler assembler_;

  // PC-relative method patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> boot_image_method_patches_;
  // PC-relative method patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> method_bss_entry_patches_;
  // PC-relative type patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> boot_image_type_patches_;
  // PC-relative type patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
  // PC-relative String patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
  // PC-relative String patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
  // PC-relative patch info for IntrinsicObjects for the boot image,
  // and for method/type/string patches for kBootImageRelRo otherwise.
  ArenaDeque<PcRelativePatchInfo> boot_image_other_patches_;
  // Patch info for calls to entrypoint dispatch thunks. Used for slow paths.
  ArenaDeque<PatchInfo<vixl::aarch64::Label>> call_entrypoint_patches_;
  // Baker read barrier patch info.
  ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;

  // Deduplication map for 32-bit literals, used for JIT for boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // Deduplication map for 64-bit literals, used for JIT for method address or method code.
  Uint64ToLiteralMap uint64_literals_;
  // Patches for string literals in JIT compiled code.
  StringToLiteralMap jit_string_patches_;
  // Patches for class literals in JIT compiled code.
  TypeToLiteralMap jit_class_patches_;

  // Baker read barrier slow paths, mapping custom data (uint32_t) to label.
  // Wrap the label to work around vixl::aarch64::Label being non-copyable
  // and non-moveable and as such unusable in ArenaSafeMap<>.
  struct LabelWrapper {
    LabelWrapper(const LabelWrapper& src)
        : label() {
      DCHECK(!src.label.IsLinked() && !src.label.IsBound());
    }
    LabelWrapper() = default;
    vixl::aarch64::Label label;
  };
  ArenaSafeMap<uint32_t, LabelWrapper> jit_baker_read_barrier_slow_paths_;

  friend class linker::Arm64RelativePatcherTest;
  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM64);
};

inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {
  return codegen_->GetAssembler();
}

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_