/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_

#include "base/enums.h"
#include "code_generator.h"
#include "common_arm.h"
#include "dex/string_reference.h"
#include "dex/type_reference.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/arm/assembler_arm_vixl.h"

// TODO(VIXL): make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch32/constants-aarch32.h"
#include "aarch32/instructions-aarch32.h"
#include "aarch32/macro-assembler-aarch32.h"
#pragma GCC diagnostic pop

namespace art {

namespace linker {
class Thumb2RelativePatcherTest;
}  // namespace linker

namespace arm {

// This constant is used as an approximate margin when emission of veneer and literal pools
// must be blocked.
static constexpr int kMaxMacroInstructionSizeInBytes =
    15 * vixl::aarch32::kMaxInstructionSizeInBytes;

static const vixl::aarch32::Register kParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kParameterCoreRegistersLengthVIXL = arraysize(kParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3,
    vixl::aarch32::s4,
    vixl::aarch32::s5,
    vixl::aarch32::s6,
    vixl::aarch32::s7,
    vixl::aarch32::s8,
    vixl::aarch32::s9,
    vixl::aarch32::s10,
    vixl::aarch32::s11,
    vixl::aarch32::s12,
    vixl::aarch32::s13,
    vixl::aarch32::s14,
    vixl::aarch32::s15
};
static const size_t kParameterFpuRegistersLengthVIXL = arraysize(kParameterFpuRegistersVIXL);

static const vixl::aarch32::Register kMethodRegister = vixl::aarch32::r0;

static const vixl::aarch32::Register kCoreAlwaysSpillRegister = vixl::aarch32::r5;

// The callee-save core registers are r5, r6, r7, r8 (except when emitting Baker
// read barriers, where r8 is used as the Marking Register), r10, r11, and lr.
static const vixl::aarch32::RegisterList kCoreCalleeSaves = vixl::aarch32::RegisterList::Union(
    vixl::aarch32::RegisterList(vixl::aarch32::r5,
                                vixl::aarch32::r6,
                                vixl::aarch32::r7),
    // Do not consider r8 as a callee-save register with Baker read barriers.
    ((kEmitCompilerReadBarrier && kUseBakerReadBarrier)
         ? vixl::aarch32::RegisterList()
         : vixl::aarch32::RegisterList(vixl::aarch32::r8)),
    vixl::aarch32::RegisterList(vixl::aarch32::r10,
                                vixl::aarch32::r11,
                                vixl::aarch32::lr));

// The callee-save FP registers are s16 to s31 inclusive.
static const vixl::aarch32::SRegisterList kFpuCalleeSaves =
    vixl::aarch32::SRegisterList(vixl::aarch32::s16, 16);

static const vixl::aarch32::Register kRuntimeParameterCoreRegistersVIXL[] = {
    vixl::aarch32::r0,
    vixl::aarch32::r1,
    vixl::aarch32::r2,
    vixl::aarch32::r3
};
static const size_t kRuntimeParameterCoreRegistersLengthVIXL =
    arraysize(kRuntimeParameterCoreRegistersVIXL);
static const vixl::aarch32::SRegister kRuntimeParameterFpuRegistersVIXL[] = {
    vixl::aarch32::s0,
    vixl::aarch32::s1,
    vixl::aarch32::s2,
    vixl::aarch32::s3
};
static const size_t kRuntimeParameterFpuRegistersLengthVIXL =
    arraysize(kRuntimeParameterFpuRegistersVIXL);

class LoadClassSlowPathARMVIXL;
class CodeGeneratorARMVIXL;

using VIXLInt32Literal = vixl::aarch32::Literal<int32_t>;
using VIXLUInt32Literal = vixl::aarch32::Literal<uint32_t>;

class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
 public:
  explicit JumpTableARMVIXL(HPackedSwitch* switch_instr)
      : switch_instr_(switch_instr),
        table_start_(),
        bb_addresses_(switch_instr->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
    uint32_t num_entries = switch_instr_->GetNumEntries();
    for (uint32_t i = 0; i < num_entries; i++) {
      VIXLInt32Literal* lit = new VIXLInt32Literal(0, vixl32::RawLiteral::kManuallyPlaced);
      bb_addresses_.emplace_back(lit);
    }
  }

  vixl::aarch32::Label* GetTableStartLabel() { return &table_start_; }

  void EmitTable(CodeGeneratorARMVIXL* codegen);
  void FixTable(CodeGeneratorARMVIXL* codegen);

 private:
  HPackedSwitch* const switch_instr_;
  vixl::aarch32::Label table_start_;
  ArenaVector<std::unique_ptr<VIXLInt32Literal>> bb_addresses_;

  DISALLOW_COPY_AND_ASSIGN(JumpTableARMVIXL);
};
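
// An illustrative note on usage: the jump table keeps one manually placed
// 32-bit literal per switch entry (see the constructor above); EmitTable()
// emits them at table_start_, and FixTable() patches each literal once the
// target blocks' label positions are known. This is a sketch of the intent;
// the exact offset computation lives in the corresponding .cc file.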

class InvokeRuntimeCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeRuntimeCallingConventionARMVIXL()
      : CallingConvention(kRuntimeParameterCoreRegistersVIXL,
                          kRuntimeParameterCoreRegistersLengthVIXL,
                          kRuntimeParameterFpuRegistersVIXL,
                          kRuntimeParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConventionARMVIXL);
};

class InvokeDexCallingConventionARMVIXL
    : public CallingConvention<vixl::aarch32::Register, vixl::aarch32::SRegister> {
 public:
  InvokeDexCallingConventionARMVIXL()
      : CallingConvention(kParameterCoreRegistersVIXL,
                          kParameterCoreRegistersLengthVIXL,
                          kParameterFpuRegistersVIXL,
                          kParameterFpuRegistersLengthVIXL,
                          kArmPointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionARMVIXL);
};

class InvokeDexCallingConventionVisitorARMVIXL : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorARMVIXL() {}
  virtual ~InvokeDexCallingConventionVisitorARMVIXL() {}

  Location GetNextLocation(DataType::Type type) OVERRIDE;
  Location GetReturnLocation(DataType::Type type) const OVERRIDE;
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConventionARMVIXL calling_convention;
  uint32_t double_index_ = 0;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARMVIXL);
};

class FieldAccessCallingConventionARMVIXL : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionARMVIXL() {}

  Location GetObjectLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch32::r1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetReturnLocation(DataType::Type type) const OVERRIDE {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r0, vixl::aarch32::r1)
        : helpers::LocationFrom(vixl::aarch32::r0);
  }
  Location GetSetValueLocation(DataType::Type type, bool is_instance) const OVERRIDE {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::r2, vixl::aarch32::r3)
        : (is_instance
            ? helpers::LocationFrom(vixl::aarch32::r2)
            : helpers::LocationFrom(vixl::aarch32::r1));
  }
  Location GetFpuLocation(DataType::Type type) const OVERRIDE {
    return DataType::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch32::s0, vixl::aarch32::s1)
        : helpers::LocationFrom(vixl::aarch32::s0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionARMVIXL);
};
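
// For illustration, per the locations above: an instance field set of a
// 64-bit value receives the object reference in r1 and the value in the
// r2/r3 pair, while 64-bit results are returned in the r0/r1 pair.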

class SlowPathCodeARMVIXL : public SlowPathCode {
 public:
  explicit SlowPathCodeARMVIXL(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch32::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch32::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;

 private:
  vixl::aarch32::Label entry_label_;
  vixl::aarch32::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARMVIXL);
};

class ParallelMoveResolverARMVIXL : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverARMVIXL(ArenaAllocator* allocator, CodeGeneratorARMVIXL* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  ArmVIXLAssembler* GetAssembler() const;

 private:
  void Exchange(vixl32::Register reg, int mem);
  void Exchange(int mem1, int mem2);

  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARMVIXL);
};

class LocationsBuilderARMVIXL : public HGraphVisitor {
 public:
  LocationsBuilderARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBitwiseOperation(HBinaryOperation* operation, Opcode opcode);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(LocationSummary* locations);
  void HandleLongRotate(LocationSummary* locations);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  Location ArithmeticZeroOrFpuRegister(HInstruction* input);
  Location ArmEncodableConstantOrRegister(HInstruction* constant, Opcode opcode);
  bool CanEncodeConstantAsImmediate(HConstant* input_cst, Opcode opcode);

  CodeGeneratorARMVIXL* const codegen_;
  InvokeDexCallingConventionVisitorARMVIXL parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARMVIXL);
};

class InstructionCodeGeneratorARMVIXL : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARMVIXL(HGraph* graph, CodeGeneratorARMVIXL* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  ArmVIXLAssembler* GetAssembler() const { return assembler_; }
  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

 private:
  // Generate code for the given suspend check. If not null, `successor`
  // is the block to branch to if the suspend check is not needed, and after
  // the suspend call.
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void GenerateClassInitializationCheck(LoadClassSlowPathARMVIXL* slow_path,
                                        vixl32::Register class_reg);
  void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                         vixl::aarch32::Register temp,
                                         vixl::aarch32::FlagsUpdate flags_update);
  void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
  void GenerateAddLongConst(Location out, Location first, uint64_t value);
  void HandleBitwiseOperation(HBinaryOperation* operation);
  void HandleCondition(HCondition* condition);
  void HandleIntegerRotate(HRor* ror);
  void HandleLongRotate(HRor* ror);
  void HandleShift(HBinaryOperation* operation);

  void GenerateWideAtomicStore(vixl::aarch32::Register addr,
                               uint32_t offset,
                               vixl::aarch32::Register value_lo,
                               vixl::aarch32::Register value_hi,
                               vixl::aarch32::Register temp1,
                               vixl::aarch32::Register temp2,
                               HInstruction* instruction);
  void GenerateWideAtomicLoad(vixl::aarch32::Register addr,
                              uint32_t offset,
                              vixl::aarch32::Register out_lo,
                              vixl::aarch32::Register out_hi);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  void GenerateMinMaxInt(LocationSummary* locations, bool is_min);
  void GenerateMinMaxLong(LocationSummary* locations, bool is_min);
  void GenerateMinMaxFloat(HInstruction* minmax, bool is_min);
  void GenerateMinMaxDouble(HInstruction* minmax, bool is_min);
  void GenerateMinMax(HBinaryOperation* minmax, bool is_min);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch32::Label* true_target,
                             vixl::aarch32::Label* false_target,
                             bool far_target = true);
  void GenerateCompareTestAndBranch(HCondition* condition,
                                    vixl::aarch32::Label* true_target,
                                    vixl::aarch32::Label* false_target,
                                    bool is_far_target = true);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemConstantIntegral(HBinaryOperation* instruction);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);

  vixl::aarch32::MemOperand VecAddress(
      HVecMemoryOperation* instruction,
      // This function may acquire a scratch register.
      vixl::aarch32::UseScratchRegisterScope* temps_scope,
      /*out*/ vixl32::Register* scratch);
  vixl::aarch32::AlignedMemOperand VecAddressUnaligned(
      HVecMemoryOperation* instruction,
      // This function may acquire a scratch register.
      vixl::aarch32::UseScratchRegisterScope* temps_scope,
      /*out*/ vixl32::Register* scratch);

  ArmVIXLAssembler* const assembler_;
  CodeGeneratorARMVIXL* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARMVIXL);
};

class CodeGeneratorARMVIXL : public CodeGenerator {
 public:
  CodeGeneratorARMVIXL(HGraph* graph,
                       const ArmInstructionSetFeatures& isa_features,
                       const CompilerOptions& compiler_options,
                       OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorARMVIXL() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;
  void Bind(HBasicBlock* block) OVERRIDE;
  void MoveConstant(Location destination, int32_t value) OVERRIDE;
  void MoveLocation(Location dst, Location src, DataType::Type dst_type) OVERRIDE;
  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  size_t GetWordSize() const OVERRIDE {
    return static_cast<size_t>(kArmPointerSize);
  }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE { return vixl::aarch32::kRegSizeInBytes; }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }

  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }

  ArmVIXLAssembler* GetAssembler() OVERRIDE { return &assembler_; }

  const ArmVIXLAssembler& GetAssembler() const OVERRIDE { return assembler_; }

  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    vixl::aarch32::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  void FixJumpTables();
  void SetupBlockedRegisters() const OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }
  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kThumb2; }

  // Helper method to move a 32-bit value between two locations.
  void Move32(Location destination, Location source);

  void LoadFromShiftedRegOffset(DataType::Type type,
                                Location out_loc,
                                vixl::aarch32::Register base,
                                vixl::aarch32::Register reg_index,
                                vixl::aarch32::Condition cond = vixl::aarch32::al);
  void StoreToShiftedRegOffset(DataType::Type type,
                               Location out_loc,
                               vixl::aarch32::Register base,
                               vixl::aarch32::Register reg_index,
                               vixl::aarch32::Condition cond = vixl::aarch32::al);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  // Emit a write barrier.
  void MarkGCCard(vixl::aarch32::Register temp,
                  vixl::aarch32::Register card,
                  vixl::aarch32::Register object,
                  vixl::aarch32::Register value,
                  bool can_be_null);

  void GenerateMemoryBarrier(MemBarrierKind kind);

  vixl::aarch32::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  vixl32::Label* GetFinalLabel(HInstruction* instruction, vixl32::Label* final_label);

  void Initialize() OVERRIDE {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  const ArmInstructionSetFeatures& GetInstructionSetFeatures() const { return isa_features_; }

  bool NeedsTwoRegisters(DataType::Type type) const OVERRIDE {
    return type == DataType::Type::kFloat64 || type == DataType::Type::kInt64;
  }

  void ComputeSpillMask() OVERRIDE;

  vixl::aarch32::Label* GetFrameEntryLabel() { return &frame_entry_label_; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;
  void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;

  void MoveFromReturnRegister(Location trg, DataType::Type type) OVERRIDE;

  // The PcRelativePatchInfo is used for PC-relative addressing of methods/strings/types,
  // whether through .data.bimg.rel.ro, .bss, or directly in the boot image.
  //
  // The PC-relative address is loaded with three instructions,
  // MOVW+MOVT to load the offset to base_reg and then ADD base_reg, PC. The offset
  // is calculated from the ADD's effective PC, i.e. PC+4 on Thumb2. Though we
  // currently emit these 3 instructions together, instruction scheduling could
  // split this sequence apart, so we keep separate labels for each of them.
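  //
  // For illustration only, the emitted sequence looks roughly like this
  // (base_reg and the 16-bit immediates are placeholders, filled in when the
  // patch is resolved):
  //
  //   movw base_reg, #lo16   @ lower 16 bits of (target - (pc_of_add + 4))
  //   movt base_reg, #hi16   @ upper 16 bits of (target - (pc_of_add + 4))
  //   add  base_reg, pc      @ the offset is relative to this ADD's PC + 4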
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile* dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx) { }
    PcRelativePatchInfo(PcRelativePatchInfo&& other) = default;

    // Target dex file or null for .data.bimg.rel.ro patches.
    const DexFile* target_dex_file;
    // Either the boot image offset (to write to .data.bimg.rel.ro) or string/type/method index.
    uint32_t offset_or_index;
    vixl::aarch32::Label movw_label;
    vixl::aarch32::Label movt_label;
    vixl::aarch32::Label add_pc_label;
  };
580
  PcRelativePatchInfo* NewBootImageRelRoPatch(uint32_t boot_image_offset);
  PcRelativePatchInfo* NewBootImageMethodPatch(MethodReference target_method);
  PcRelativePatchInfo* NewMethodBssEntryPatch(MethodReference target_method);
  PcRelativePatchInfo* NewBootImageTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
  PcRelativePatchInfo* NewBootImageStringPatch(const DexFile& dex_file,
                                               dex::StringIndex string_index);
  PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
                                              dex::StringIndex string_index);

  // Add a new Baker read barrier patch and return the label to be bound
  // before the BNE instruction.
  vixl::aarch32::Label* NewBakerReadBarrierPatch(uint32_t custom_data);

  VIXLUInt32Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
  VIXLUInt32Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                 dex::StringIndex string_index,
                                                 Handle<mirror::String> handle);
  VIXLUInt32Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                dex::TypeIndex type_index,
                                                Handle<mirror::Class> handle);

  void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
  bool NeedsThunkCode(const linker::LinkerPatch& patch) const OVERRIDE;
  void EmitThunkCode(const linker::LinkerPatch& patch,
                     /*out*/ ArenaVector<uint8_t>* code,
                     /*out*/ std::string* debug_name) OVERRIDE;

  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers based on read_barrier_option.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch32::Register obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch32::Register obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);
  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 vixl::aarch32::Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check);

  // Generate code checking whether the reference field at the
  // address `obj + field_offset`, held by object `obj`, needs to be
  // marked, and if so, marking it and updating the field within `obj`
  // with the marked value.
  //
  // This routine is used for the implementation of the
  // UnsafeCASObject intrinsic with Baker read barriers.
  //
  // This method has a structure similar to
  // GenerateReferenceLoadWithBakerReadBarrier, but note that argument
  // `ref` is only used as a temporary here, and thus its value should
  // not be used afterwards.
  void UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
                                                Location ref,
                                                vixl::aarch32::Register obj,
                                                Location field_offset,
                                                Location temp,
                                                bool needs_null_check,
                                                vixl::aarch32::Register temp2);

  // Generate a heap reference load (with no read barrier).
  void GenerateRawReferenceLoad(HInstruction* instruction,
                                Location ref,
                                vixl::aarch32::Register obj,
                                uint32_t offset,
                                Location index,
                                ScaleFactor scale_factor,
                                bool needs_null_check);

  // Emit code checking the status of the Marking Register, and
  // aborting the program if MR does not match the value stored in the
  // art::Thread object. Code is only emitted in debug mode and if
  // CompilerOptions::EmitRunTimeChecksInDebugMode returns true.
  //
  // Argument `code` is used to identify the different occurrences of
  // MaybeGenerateMarkingRegisterCheck in the code generator, and is
  // used together with kMarkingRegisterCheckBreakCodeBaseCode to
  // create the value passed to the BKPT instruction. Note that unlike
  // in the ARM64 code generator, where `__LINE__` is passed as `code`
  // argument to
  // CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck, we cannot
  // realistically do that here, as Encoding T1 for the BKPT
  // instruction only accepts 8-bit immediate values.
  //
  // If `temp_loc` is a valid location, it is expected to be a
  // register and will be used as a temporary to generate code;
  // otherwise, a temporary will be fetched from the core register
  // scratch pool.
  virtual void MaybeGenerateMarkingRegisterCheck(int code,
                                                 Location temp_loc = Location::NoLocation());
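  // For example (an illustrative sketch, not the exact encoding): a call such
  // as MaybeGenerateMarkingRegisterCheck(/* code= */ 3) emits, in debug mode,
  // a check that traps with a BKPT whose 8-bit immediate is derived from
  // kMarkingRegisterCheckBreakCodeBaseCode and the value 3.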

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void GenerateNop() OVERRIDE;

  void GenerateImplicitNullCheck(HNullCheck* instruction) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* instruction) OVERRIDE;

  JumpTableARMVIXL* CreateJumpTable(HPackedSwitch* switch_instr) {
    jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARMVIXL(switch_instr));
    return jump_tables_.back().get();
  }
  void EmitJumpTables();

  void EmitMovwMovtPlaceholder(CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
                               vixl::aarch32::Register out);

  // `temp` is an extra temporary register that is used for some conditions;
  // callers may not specify it, in which case the method will use a scratch
  // register instead.
  void GenerateConditionWithZero(IfCondition condition,
                                 vixl::aarch32::Register out,
                                 vixl::aarch32::Register in,
                                 vixl::aarch32::Register temp = vixl32::Register());

 private:
  // Encoding of thunk type and data for link-time generated thunks for Baker read barriers.

  enum class BakerReadBarrierKind : uint8_t {
    kField,   // Field get or array get with constant offset (i.e. constant index).
    kArray,   // Array get with index in register.
    kGcRoot,  // GC root load.
    kLast = kGcRoot
  };

  enum class BakerReadBarrierWidth : uint8_t {
    kWide,    // 32-bit LDR (and 32-bit NEG if heap poisoning is enabled).
    kNarrow,  // 16-bit LDR (and 16-bit NEG if heap poisoning is enabled).
    kLast = kNarrow
  };

  static constexpr uint32_t kBakerReadBarrierInvalidEncodedReg = /* pc is invalid */ 15u;

  static constexpr size_t kBitsForBakerReadBarrierKind =
      MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierKind::kLast));
  static constexpr size_t kBakerReadBarrierBitsForRegister =
      MinimumBitsToStore(kBakerReadBarrierInvalidEncodedReg);
  using BakerReadBarrierKindField =
      BitField<BakerReadBarrierKind, 0, kBitsForBakerReadBarrierKind>;
  using BakerReadBarrierFirstRegField =
      BitField<uint32_t, kBitsForBakerReadBarrierKind, kBakerReadBarrierBitsForRegister>;
  using BakerReadBarrierSecondRegField =
      BitField<uint32_t,
               kBitsForBakerReadBarrierKind + kBakerReadBarrierBitsForRegister,
               kBakerReadBarrierBitsForRegister>;
  static constexpr size_t kBitsForBakerReadBarrierWidth =
      MinimumBitsToStore(static_cast<size_t>(BakerReadBarrierWidth::kLast));
  using BakerReadBarrierWidthField =
      BitField<BakerReadBarrierWidth,
               kBitsForBakerReadBarrierKind + 2 * kBakerReadBarrierBitsForRegister,
               kBitsForBakerReadBarrierWidth>;
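
  // A sketch of the resulting custom_data layout, assuming the bit widths
  // derived above (2 bits for the kind, 4 bits per register, 1 bit for the
  // width):
  //
  //   bits [0, 2)  : BakerReadBarrierKind
  //   bits [2, 6)  : first register
  //   bits [6, 10) : second register, or kBakerReadBarrierInvalidEncodedReg
  //   bit  10      : BakerReadBarrierWidth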

  static void CheckValidReg(uint32_t reg) {
    DCHECK(reg < vixl::aarch32::ip.GetCode() && reg != mr.GetCode()) << reg;
  }

  static uint32_t EncodeBakerReadBarrierFieldData(uint32_t base_reg,
                                                  uint32_t holder_reg,
                                                  bool narrow) {
    CheckValidReg(base_reg);
    CheckValidReg(holder_reg);
    DCHECK(!narrow || base_reg < 8u) << base_reg;
    BakerReadBarrierWidth width =
        narrow ? BakerReadBarrierWidth::kNarrow : BakerReadBarrierWidth::kWide;
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kField) |
           BakerReadBarrierFirstRegField::Encode(base_reg) |
           BakerReadBarrierSecondRegField::Encode(holder_reg) |
           BakerReadBarrierWidthField::Encode(width);
  }

  static uint32_t EncodeBakerReadBarrierArrayData(uint32_t base_reg) {
    CheckValidReg(base_reg);
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kArray) |
           BakerReadBarrierFirstRegField::Encode(base_reg) |
           BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg) |
           BakerReadBarrierWidthField::Encode(BakerReadBarrierWidth::kWide);
  }

  static uint32_t EncodeBakerReadBarrierGcRootData(uint32_t root_reg, bool narrow) {
    CheckValidReg(root_reg);
    DCHECK(!narrow || root_reg < 8u) << root_reg;
    BakerReadBarrierWidth width =
        narrow ? BakerReadBarrierWidth::kNarrow : BakerReadBarrierWidth::kWide;
    return BakerReadBarrierKindField::Encode(BakerReadBarrierKind::kGcRoot) |
           BakerReadBarrierFirstRegField::Encode(root_reg) |
           BakerReadBarrierSecondRegField::Encode(kBakerReadBarrierInvalidEncodedReg) |
           BakerReadBarrierWidthField::Encode(width);
  }

  void CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,
                                    uint32_t encoded_data,
                                    /*out*/ std::string* debug_name);

  vixl::aarch32::Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                vixl::aarch32::Register temp);

  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, VIXLUInt32Literal*>;
  using StringToLiteralMap = ArenaSafeMap<StringReference,
                                          VIXLUInt32Literal*,
                                          StringReferenceValueComparator>;
  using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                        VIXLUInt32Literal*,
                                        TypeReferenceValueComparator>;

  struct BakerReadBarrierPatchInfo {
    explicit BakerReadBarrierPatchInfo(uint32_t data) : label(), custom_data(data) { }

    vixl::aarch32::Label label;
    uint32_t custom_data;
  };

  VIXLUInt32Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
  PcRelativePatchInfo* NewPcRelativePatch(const DexFile* dex_file,
                                          uint32_t offset_or_index,
                                          ArenaDeque<PcRelativePatchInfo>* patches);
  template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                          ArenaVector<linker::LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  // We use a deque so that the `vixl::aarch32::Label` objects do not move in memory.
  ArenaDeque<vixl::aarch32::Label> block_labels_;  // Indexed by block id.
  vixl::aarch32::Label frame_entry_label_;

  ArenaVector<std::unique_ptr<JumpTableARMVIXL>> jump_tables_;
  LocationsBuilderARMVIXL location_builder_;
  InstructionCodeGeneratorARMVIXL instruction_visitor_;
  ParallelMoveResolverARMVIXL move_resolver_;

  ArmVIXLAssembler assembler_;
  const ArmInstructionSetFeatures& isa_features_;

  // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // PC-relative method patch info for kBootImageLinkTimePcRelative/kBootImageRelRo.
  // Also used for type/string patches for kBootImageRelRo (same linker patch as for methods).
  ArenaDeque<PcRelativePatchInfo> boot_image_method_patches_;
  // PC-relative method patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> method_bss_entry_patches_;
  // PC-relative type patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> boot_image_type_patches_;
  // PC-relative type patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
  // PC-relative String patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
  // PC-relative String patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
  // Baker read barrier patch info.
  ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;

  // Patches for string literals in JIT compiled code.
  StringToLiteralMap jit_string_patches_;
  // Patches for class literals in JIT compiled code.
  TypeToLiteralMap jit_class_patches_;

  friend class linker::Thumb2RelativePatcherTest;
  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARMVIXL);
};

}  // namespace arm
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM_VIXL_H_