/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DWARFReg;
using helpers::FromLowSToD;
using helpers::HighDRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegisterAt;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and fpu register pairs.
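  // E.g. (r0, r1) and (s4, s5) satisfy this layout, while (r1, r2) does not,
  // since the low register of that pair has an odd code.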
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

static constexpr size_t kArmInstrMaxSizeInBytes = 4u;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker for code that is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers;
// for each live D register they treat the two corresponding S registers as live.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build
// from a list of contiguous S registers a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list as D registers, thereby:
// - decreasing code size;
// - avoiding hazards on Cortex-A57, which arise when the pair of S registers backing a live
//   D register is restored as S registers and then used as a D register in regular
//   non-slow-path code.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    | v  | v  | v  | v  | v  | v  |    | ...
//
// S1 and S6 will be saved/restored individually; the S2-S5 range will be processed as the
// D-register list (D1, D2).
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
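  // Worked example on a hypothetical mask: fp_spills = 0b01101110.
  //   begin = CTZ(0b01101110) = 1
  //   tmp   = 0b01101110 + 0b10 = 0b01110000
  //   fp_spills &= tmp  ->  0b01100000  (the S1-S3 run of 1s is cleared)
  //   end   = CTZ(tmp) = 4, so SaveContiguousSRegisterList covers S1..S3;
  // the next iteration then handles the remaining S5-S6 run the same way.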
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
      : SlowPathCodeARMVIXL(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned conditions remain unchanged.
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

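// E.g. a list covering s16-s31 (the AAPCS FP callee-saved range) yields the
// mask 0xffff0000.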
static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}

#undef __

CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
                                           const ArmInstructionSetFeatures& isa_features,
                                           const CompilerOptions& compiler_options,
                                           OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    kCoreCalleeSaves.GetList(),
                    ComputeSRegisterListMask(kFpuCalleeSaves),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features) {
  // Always save the LR register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
}

#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->

void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
  GetAssembler()->FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
  // Don't allocate the Dalvik-style register pair used for argument passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
         i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
         ++i) {
      blocked_fpu_registers_[i] = true;
    }
  }

  UpdateBlockedPairRegisters();
}

// Blocks all register pairs containing blocked core registers.
void CodeGeneratorARMVIXL::UpdateBlockedPairRegisters() const {
  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
    ArmManagedRegister current =
        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
    if (blocked_core_registers_[current.AsRegisterPairLow()]
        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
      blocked_register_pairs_[i] = true;
    }
  }
}

InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
                                                                 CodeGeneratorARMVIXL* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void CodeGeneratorARMVIXL::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  // There is no easy instruction to restore just the PC on thumb2. We spill and
  // restore another arbitrary register.
  core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister.GetCode());
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // an SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
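  // E.g. if only s16 and s18 were allocated, the mask 0b101 << 16 is widened to
  // 0b111 << 16 so that a single vpush/vpop of {s16-s18} covers the whole range.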
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}

void CodeGeneratorARMVIXL::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
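    // Implicit stack-overflow check: probe an address GetStackOverflowReservedBytes
    // below SP. If it lies in the protected guard region the load faults, and the
    // fault handler relies on the pc info recorded below to attribute the fault to
    // this method.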
    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register temp = temps.Acquire();
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    // The load must immediately precede RecordPcInfo.
    AssemblerAccurateScope aas(GetVIXLAssembler(),
                               kArmInstrMaxSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
    __ ldr(temp, MemOperand(temp));
    RecordPcInfo(nullptr, 0);
  }

  __ Push(RegisterList(core_spill_mask_));
  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
  GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
                                         0,
                                         core_spill_mask_,
                                         kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0), 0, fpu_spill_mask_, kArmWordSize);
  }
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Sub(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(adjust);
  GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
}

void CodeGeneratorARMVIXL::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ Bx(lr);
    return;
  }
  GetAssembler()->cfi().RememberState();
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Add(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(
        -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
  }
  // Pop LR into PC to return.
  DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
  uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
  __ Pop(RegisterList(pop_mask));
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  }
}

void CodeGeneratorARMVIXL::MoveConstant(Location destination ATTRIBUTE_UNUSED,
                                        int32_t value ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
  // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
  // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
  HParallelMove move(GetGraph()->GetArena());
  move.AddMove(src, dst, dst_type, nullptr);
  GetMoveResolver()->EmitNativeCode(&move);
}

void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location ATTRIBUTE_UNUSED,
                                             LocationSummary* locations ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
    // previous instruction.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                               HInstruction* instruction,
                                                               SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorARMVIXL::GenerateInvokeRuntime(int32_t entry_point_offset) {
  GetAssembler()->LoadFromOffset(kLoadWord, lr, tr, entry_point_offset);
  __ Blx(lr);
}

void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  LoadClassSlowPathARMVIXL* slow_path =
      new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(check->GetLoadClass(),
                                                            check,
                                                            check->GetDexPc(),
                                                            /* do_clinit */ true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
    LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  GetAssembler()->LoadFromOffset(kLoadWord,
                                 temp,
                                 class_reg,
                                 mirror::Class::StatusOffset().Int32Value());
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ Dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}

// Check if the desired_string_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths. For now we always use the simpler fallback code.
  return HLoadString::LoadKind::kDexCacheViaMethod;
}

void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);

  // TODO(VIXL): Implement optimized code paths.
  // See InstructionCodeGeneratorARMVIXL::VisitLoadString.
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
    // TODO(VIXL): Use InvokeRuntimeCallingConventionARMVIXL instead.
    locations->SetOut(LocationFrom(r0));
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
  // TODO(VIXL): Implement optimized code paths.
  // We implemented the simplest solution to get the first ART tests passing; the
  // optimized path is deferred until later and should use the ARM64 implementation
  // as a reference. The same applies to LocationsBuilderARMVIXL::VisitLoadString.

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kDexCacheViaMethod);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex());
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}

// Check if the desired_class_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths.
  return HLoadClass::LoadKind::kDexCacheViaMethod;
}

// Check if the desired_dispatch_info is supported. If it is, return it,
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info ATTRIBUTE_UNUSED,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // TODO(VIXL): Implement optimized code paths.
  return {
    HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
    HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
    0u,
    0u
  };
}

// Copy the result of a call into the given target.
void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                                                  Primitive::Type type ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void InstructionCodeGeneratorARMVIXL::GenerateVcmp(HInstruction* instruction) {
  Primitive::Type type = instruction->InputAt(0)->GetType();
  Location lhs_loc = instruction->GetLocations()->InAt(0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // a VCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(F64, FromLowSToD(LowSRegisterFrom(lhs_loc)), 0.0);
    }
  } else {
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(InputSRegisterAt(instruction, 0), InputSRegisterAt(instruction, 1));
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(FromLowSToD(LowSRegisterFrom(lhs_loc)), FromLowSToD(LowSRegisterFrom(rhs_loc)));
    }
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateFPJumps(HCondition* cond,
                                                      vixl32::Label* true_label,
                                                      vixl32::Label* false_label ATTRIBUTE_UNUSED) {
  // To branch on the result of the FP compare we transfer FPSCR to APSR (encoded as PC in VMRS).
  __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
  __ B(ARMFPCondition(cond->GetCondition(), cond->IsGtBias()), true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateLongComparesAndJumps(HCondition* cond,
                                                                   vixl32::Label* true_label,
                                                                   vixl32::Label* false_label) {
  LocationSummary* locations = cond->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  IfCondition if_cond = cond->GetCondition();

  vixl32::Register left_high = HighRegisterFrom(left);
  vixl32::Register left_low = LowRegisterFrom(left);
  IfCondition true_high_cond = if_cond;
  IfCondition false_high_cond = cond->GetOppositeCondition();
  vixl32::Condition final_condition = ARMUnsignedCondition(if_cond);  // Unsigned on lower part.

  // Set the conditions for the test, remembering that == needs to be
  // decided using the low words.
  // TODO: consider avoiding jumps with temporary and CMP low+SBC high
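  // E.g. for kCondLE: true_high_cond stays kCondLT, false_high_cond becomes
  // kCondGT and final_condition is ls, so we branch to true_label on a smaller
  // high word, to false_label on a greater one, and otherwise fall through to
  // an unsigned compare of the low words.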
  switch (if_cond) {
    case kCondEQ:
    case kCondNE:
      // Nothing to do.
      break;
    case kCondLT:
      false_high_cond = kCondGT;
      break;
    case kCondLE:
      true_high_cond = kCondLT;
      break;
    case kCondGT:
      false_high_cond = kCondLT;
      break;
    case kCondGE:
      true_high_cond = kCondGT;
      break;
    case kCondB:
      false_high_cond = kCondA;
      break;
    case kCondBE:
      true_high_cond = kCondB;
      break;
    case kCondA:
      false_high_cond = kCondB;
      break;
    case kCondAE:
      true_high_cond = kCondA;
      break;
  }
  if (right.IsConstant()) {
    int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
    int32_t val_low = Low32Bits(value);
    int32_t val_high = High32Bits(value);

    __ Cmp(left_high, val_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, val_low);
  } else {
    vixl32::Register right_high = HighRegisterFrom(right);
    vixl32::Register right_low = LowRegisterFrom(right);

    __ Cmp(left_high, right_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, right_low);
  }
  // The last comparison might be unsigned.
  // TODO: optimize cases where this is always true/false
  __ B(final_condition, true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
                                                                   vixl32::Label* true_target_in,
                                                                   vixl32::Label* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough), use and bind `fallthrough` instead.
  vixl32::Label fallthrough;
  vixl32::Label* true_target = (true_target_in == nullptr) ? &fallthrough : true_target_in;
  vixl32::Label* false_target = (false_target_in == nullptr) ? &fallthrough : false_target_in;

  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(condition, true_target, false_target);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(condition);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  if (false_target != &fallthrough) {
    __ B(false_target);
  }

  if (true_target_in == nullptr || false_target_in == nullptr) {
    __ Bind(&fallthrough);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
                                                            size_t condition_input_index,
                                                            vixl32::Label* true_target,
                                                            vixl32::Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition has been materialized, compare the output to 0.
    if (kIsDebugBuild) {
      Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
      DCHECK(cond_val.IsRegister());
    }
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // Condition has not been materialized. Use its inputs as the comparison and
    // its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    LocationSummary* locations = cond->GetLocations();
    DCHECK(locations->InAt(0).IsRegister());
    vixl32::Register left = InputRegisterAt(cond, 0);
    Location right = locations->InAt(1);
    if (right.IsRegister()) {
      __ Cmp(left, InputRegisterAt(cond, 1));
    } else {
      DCHECK(right.IsConstant());
      __ Cmp(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
    }
    if (true_target == nullptr) {
      __ B(ARMCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ B(ARMCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  vixl32::Label false_target;
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}

void CodeGeneratorARMVIXL::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      }
      break;

    // TODO(VIXL): https://android-review.googlesource.com/#/c/252265/
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
      if (!cond->IsEmittedAtUseSite()) {
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
  }
}

void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  vixl32::Register out = OutputRegister(cond);
  vixl32::Label true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default: {
      // Integer case.
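      // Emits: cmp; ite <cond>; mov<cond> out, #1; mov<opposite> out, #0.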
      __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
      AssemblerAccurateScope aas(GetVIXLAssembler(),
                                 kArmInstrMaxSizeInBytes * 3u,
                                 CodeBufferCheckScope::kMaximumSize);
      __ ite(ARMCondition(cond->GetCondition()));
      __ mov(ARMCondition(cond->GetCondition()), OutputRegister(cond), 1);
      __ mov(ARMCondition(cond->GetOppositeCondition()), OutputRegister(cond), 0);
      return;
    }
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(cond, &true_label, &false_label);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(cond);
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
  }

  // Convert the jumps into the result.
  vixl32::Label done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ Mov(out, 0);
  __ B(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ Mov(out, 1);
  __ Bind(&done_label);
}

void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // TODO(VIXL): TryDispatch

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // TODO(VIXL): TryGenerateIntrinsicCode

  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->HasTemps());
  codegen_->GenerateStaticOrDirectCall(invoke, locations->GetTemp(0));
  // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
  // previous instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // TODO(VIXL): TryDispatch

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // TODO(VIXL): TryGenerateIntrinsicCode

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
  // previous instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long, double-to-long and long-to-float type conversions
  // rely on a call to the runtime.
  LocationSummary::CallKind call_kind =
      (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
        && result_type == Primitive::kPrimLong)
       || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
      ? LocationSummary::kCallOnMainOnly
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
1494 locations->SetOut(Location::RequiresRegister());
1495 locations->AddTemp(Location::RequiresFpuRegister());
1496 break;
1497
1498 case Primitive::kPrimDouble:
1499 // Processing a Dex `double-to-int' instruction.
1500 locations->SetInAt(0, Location::RequiresFpuRegister());
1501 locations->SetOut(Location::RequiresRegister());
1502 locations->AddTemp(Location::RequiresFpuRegister());
1503 break;
1504
1505 default:
1506 LOG(FATAL) << "Unexpected type conversion from " << input_type
1507 << " to " << result_type;
1508 }
1509 break;
1510
1511 case Primitive::kPrimLong:
1512 switch (input_type) {
1513 case Primitive::kPrimBoolean:
1514 // Boolean input is a result of code transformations.
1515 case Primitive::kPrimByte:
1516 case Primitive::kPrimShort:
1517 case Primitive::kPrimInt:
1518 case Primitive::kPrimChar:
1519 // Processing a Dex `int-to-long' instruction.
1520 locations->SetInAt(0, Location::RequiresRegister());
1521 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1522 break;
1523
1524 case Primitive::kPrimFloat: {
1525 // Processing a Dex `float-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001526 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1527 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
1528 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001529 break;
1530 }
1531
1532 case Primitive::kPrimDouble: {
1533 // Processing a Dex `double-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001534 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1535 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
1536 calling_convention.GetFpuRegisterAt(1)));
1537 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001538 break;
1539 }
1540
1541 default:
1542 LOG(FATAL) << "Unexpected type conversion from " << input_type
1543 << " to " << result_type;
1544 }
1545 break;
1546
1547 case Primitive::kPrimChar:
1548 switch (input_type) {
1549 case Primitive::kPrimLong:
1550 // Type conversion from long to char is a result of code transformations.
1551 case Primitive::kPrimBoolean:
1552 // Boolean input is a result of code transformations.
1553 case Primitive::kPrimByte:
1554 case Primitive::kPrimShort:
1555 case Primitive::kPrimInt:
1556 // Processing a Dex `int-to-char' instruction.
1557 locations->SetInAt(0, Location::RequiresRegister());
1558 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1559 break;
1560
1561 default:
1562 LOG(FATAL) << "Unexpected type conversion from " << input_type
1563 << " to " << result_type;
1564 }
1565 break;
1566
1567 case Primitive::kPrimFloat:
1568 switch (input_type) {
1569 case Primitive::kPrimBoolean:
1570 // Boolean input is a result of code transformations.
1571 case Primitive::kPrimByte:
1572 case Primitive::kPrimShort:
1573 case Primitive::kPrimInt:
1574 case Primitive::kPrimChar:
1575 // Processing a Dex `int-to-float' instruction.
1576 locations->SetInAt(0, Location::RequiresRegister());
1577 locations->SetOut(Location::RequiresFpuRegister());
1578 break;
1579
1580 case Primitive::kPrimLong: {
1581 // Processing a Dex `long-to-float' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001582 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1583 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
1584 calling_convention.GetRegisterAt(1)));
1585 locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01001586 break;
1587 }
1588
1589 case Primitive::kPrimDouble:
1590 // Processing a Dex `double-to-float' instruction.
1591 locations->SetInAt(0, Location::RequiresFpuRegister());
1592 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1593 break;
1594
1595 default:
1596 LOG(FATAL) << "Unexpected type conversion from " << input_type
1597 << " to " << result_type;
1598 };
1599 break;
1600
1601 case Primitive::kPrimDouble:
1602 switch (input_type) {
1603 case Primitive::kPrimBoolean:
1604 // Boolean input is a result of code transformations.
1605 case Primitive::kPrimByte:
1606 case Primitive::kPrimShort:
1607 case Primitive::kPrimInt:
1608 case Primitive::kPrimChar:
1609 // Processing a Dex `int-to-double' instruction.
1610 locations->SetInAt(0, Location::RequiresRegister());
1611 locations->SetOut(Location::RequiresFpuRegister());
1612 break;
1613
1614 case Primitive::kPrimLong:
1615 // Processing a Dex `long-to-double' instruction.
1616 locations->SetInAt(0, Location::RequiresRegister());
1617 locations->SetOut(Location::RequiresFpuRegister());
1618 locations->AddTemp(Location::RequiresFpuRegister());
1619 locations->AddTemp(Location::RequiresFpuRegister());
1620 break;
1621
1622 case Primitive::kPrimFloat:
1623 // Processing a Dex `float-to-double' instruction.
1624 locations->SetInAt(0, Location::RequiresFpuRegister());
1625 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1626 break;
1627
1628 default:
1629 LOG(FATAL) << "Unexpected type conversion from " << input_type
1630 << " to " << result_type;
1631 };
1632 break;
1633
1634 default:
1635 LOG(FATAL) << "Unexpected type conversion from " << input_type
1636 << " to " << result_type;
1637 }
1638}
1639
1640void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
1641 LocationSummary* locations = conversion->GetLocations();
1642 Location out = locations->Out();
1643 Location in = locations->InAt(0);
1644 Primitive::Type result_type = conversion->GetResultType();
1645 Primitive::Type input_type = conversion->GetInputType();
1646 DCHECK_NE(result_type, input_type);
1647 switch (result_type) {
1648 case Primitive::kPrimByte:
1649 switch (input_type) {
1650 case Primitive::kPrimLong:
1651 // Type conversion from long to byte is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001652 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
Scott Wakelingfe885462016-09-22 10:24:38 +01001653 break;
1654 case Primitive::kPrimBoolean:
1655 // Boolean input is a result of code transformations.
1656 case Primitive::kPrimShort:
1657 case Primitive::kPrimInt:
1658 case Primitive::kPrimChar:
1659 // Processing a Dex `int-to-byte' instruction.
1660 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
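          // SBFX extracts the low 8 bits of the input and sign-extends them
          // into the 32-bit output, which is exactly Java's int-to-byte
          // narrowing; the unsigned counterpart UBFX, used below for char,
          // zero-extends instead.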
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
          __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
          break;
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
          } else if (in.IsDoubleStackSlot()) {
            GetAssembler()->LoadFromOffset(kLoadWord,
                                           OutputRegister(conversion),
                                           sp,
                                           in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
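          // VCVT writes its integer result to an FP register, so the value
          // is converted into the scratch S register first and then copied
          // to the core output register with VMOV.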
          __ Vcvt(I32, F32, temp, InputSRegisterAt(conversion, 0));
          __ Vmov(OutputRegister(conversion), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
          __ Vcvt(I32, F64, temp_s, FromLowSToD(LowSRegisterFrom(in)));
          __ Vmov(OutputRegister(conversion), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
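          // The 32-bit input is widened by copying it into the low word and
          // filling the high word with copies of its sign bit (ASR by 31),
          // e.g. the int -5 (0xfffffffb) becomes the long 0xffffffff fffffffb.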
          __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
          // Sign extension.
          __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
          __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
          break;
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
          __ Vcvt(F32, I32, OutputSRegister(conversion), OutputSRegister(conversion));
          break;
        }

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ Vcvt(F32, F64, OutputSRegister(conversion), FromLowSToD(LowSRegisterFrom(in)));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
          __ Vcvt(F64, I32, FromLowSToD(LowSRegisterFrom(out)), LowSRegisterFrom(out));
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          vixl32::Register low = LowRegisterFrom(in);
          vixl32::Register high = HighRegisterFrom(in);

          vixl32::SRegister out_s = LowSRegisterFrom(out);
          vixl32::DRegister out_d = FromLowSToD(out_s);

          vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
          vixl32::DRegister temp_d = FromLowSToD(temp_s);

          vixl32::SRegister constant_s = LowSRegisterFrom(locations->GetTemp(1));
          vixl32::DRegister constant_d = FromLowSToD(constant_s);

          // temp_d = int-to-double(high)
          __ Vmov(temp_s, high);
          __ Vcvt(F64, I32, temp_d, temp_s);
          // constant_d = k2Pow32EncodingForDouble
          __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
          // out_d = unsigned-to-double(low)
          __ Vmov(out_s, low);
          __ Vcvt(F64, U32, out_d, out_s);
          // out_d += temp_d * constant_d
          __ Vmla(F64, out_d, temp_d, constant_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ Vcvt(F64, F32, FromLowSToD(LowSRegisterFrom(out)), InputSRegisterAt(conversion, 0));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
      __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
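      // ADDS sets the carry flag from the low-word addition and ADC folds it
      // into the high word, implementing the 64-bit add as two 32-bit ones;
      // VisitSub below uses the analogous SUBS/SBC pair with the borrow.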
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
      break;
    }

    // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
      __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;
    }
    case Primitive::kPrimLong: {
      vixl32::Register out_hi = HighRegisterFrom(out);
      vixl32::Register out_lo = LowRegisterFrom(out);
      vixl32::Register in1_hi = HighRegisterFrom(first);
      vixl32::Register in1_lo = LowRegisterFrom(first);
      vixl32::Register in2_hi = HighRegisterFrom(second);
      vixl32::Register in2_lo = LowRegisterFrom(second);

      // Extra checks needed to guard against the overlap allowed by register
      // pairs such as R1_R2: the algorithm is wrong if out.hi aliases either
      // in1.lo or in2.lo (e.g. in1 = r0_r1, in2 = r2_r3 and out = r1_r2).
      DCHECK_NE(out_hi.GetCode(), in1_lo.GetCode());
      DCHECK_NE(out_hi.GetCode(), in2_lo.GetCode());

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]
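      // Schematically, with in1 = 2^32 + 5 (in1.hi = 1, in1.lo = 5) and
      // in2 = 3: out.lo = (5 * 3)[31:0] = 15 and out.hi = 5 * 0 + 1 * 3 + 0
      // (no carry from the low product), giving 3 * 2^32 + 15, which is
      // indeed (2^32 + 5) * 3.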

      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      // temp <- in1.lo * in2.hi
      __ Mul(temp, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ Mla(out_hi, in1_hi, in2_lo, temp);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ Umull(out_lo, temp, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ Add(out_hi, out_hi, temp);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(r0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  __ Mov(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kMethodRegister));
  } else {
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(LocationFrom(r0));
}

void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    vixl32::Register temp = RegisterFrom(instruction->GetLocations()->GetTemp(0));
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize);
    GetAssembler()->LoadFromOffset(kLoadWord, temp, tr, QUICK_ENTRY_POINT(pNewEmptyString));
    GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, code_offset.Int32Value());
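    // Roughly: the loads above fetch the method pointer stored in the
    // pNewEmptyString entry of the current thread (tr), then that method's
    // quick-code entry point into lr, so the BLX below effectively performs
    // an invoke-static on the StringFactory method.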
    AssemblerAccurateScope aas(GetVIXLAssembler(),
                               kArmInstrMaxSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
    __ blx(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kMethodRegister));
}

void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARMVIXL::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0));
      break;

    case Primitive::kPrimLong:
      __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
      __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) {
  // TODO (ported from quick): revisit ARM barrier kinds.
  DmbOptions flavor = DmbOptions::ISH;  // Quiet C++ warnings.
  switch (kind) {
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kAnyAny: {
      flavor = DmbOptions::ISH;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      flavor = DmbOptions::ISHST;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
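  // DMB ISH orders all memory accesses within the inner shareable domain,
  // which is why it serves the any-store, load-any and any-any kinds above;
  // ISHST is the cheaper variant that only orders stores against later
  // stores, matching kStoreStore.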
  __ Dmb(flavor);
}

void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr,
                                                             uint32_t offset,
                                                             vixl32::Register out_lo,
                                                             vixl32::Register out_hi) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  if (offset != 0) {
    vixl32::Register temp = temps.Acquire();
    __ Add(temp, addr, offset);
    addr = temp;
  }
  __ Ldrexd(out_lo, out_hi, addr);
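  // On ARMv7, an aligned LDREXD reads the doubleword single-copy atomically,
  // so the load side needs no retry loop; only the store side (below) does.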
}

void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr,
                                                              uint32_t offset,
                                                              vixl32::Register value_lo,
                                                              vixl32::Register value_hi,
                                                              vixl32::Register temp1,
                                                              vixl32::Register temp2,
                                                              HInstruction* instruction) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Label fail;
  if (offset != 0) {
    vixl32::Register temp = temps.Acquire();
    __ Add(temp, addr, offset);
    addr = temp;
  }
  __ Bind(&fail);
  // We need a load followed by a store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ Ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ Strexd(temp1, value_lo, value_hi, addr);
  __ Cbnz(temp1, &fail);
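  // STREXD writes 0 to its status register (temp1) on success and 1 if the
  // exclusive monitor was lost, so the CBNZ above branches back and retries
  // until the load/store pair completes without interference.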
}

void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  Location second = instruction->GetLocations()->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Rsb(out, dividend, 0);
    }
  }
}

void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

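  // The sequence below adds a rounding bias before shifting so that the
  // quotient rounds towards zero, as Java requires. For example, with
  // imm = 4 (ctz_imm = 2) and dividend = -7: temp ends up holding 3 (the
  // sign bits shifted down), and (-7 + 3) >> 2 = -1, matching Java's -7 / 4;
  // the arithmetic shift alone would give the floor value -2.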
  if (ctz_imm == 1) {
    __ Lsr(temp, dividend, 32 - ctz_imm);
  } else {
    __ Asr(temp, dividend, 31);
    __ Lsr(temp, temp, 32 - ctz_imm);
  }
  __ Add(out, temp, dividend);

  if (instruction->IsDiv()) {
    __ Asr(out, out, ctz_imm);
    if (imm < 0) {
      __ Rsb(out, out, 0);
    }
  } else {
    __ Ubfx(out, out, 0, ctz_imm);
    __ Sub(out, out, temp);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  vixl32::Register out = OutputRegister(instruction);
  vixl32::Register dividend = InputRegisterAt(instruction, 0);
  vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
  vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
  int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

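  // Classic division by multiplication with a "magic" constant (in the style
  // of Hacker's Delight, chapter 10): SMULL below keeps the high 32 bits of
  // dividend * magic, a +/- dividend correction is applied when imm and
  // magic have opposite signs, the result is shifted right, and the final
  // subtraction of the sign bit rounds the quotient towards zero. For rem,
  // MLS then computes dividend - quotient * imm.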
  __ Mov(temp1, magic);
  __ Smull(temp2, temp1, dividend, temp1);

  if (imm > 0 && magic < 0) {
    __ Add(temp1, temp1, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp1, temp1, dividend);
  }

  if (shift != 0) {
    __ Asr(temp1, temp1, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
  } else {
    __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
    // TODO: Strength reduction for mls.
    __ Mov(temp2, imm);
    __ Mls(out, temp1, temp2, dividend);
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
    HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  Location second = instruction->GetLocations()->InAt(1);
  DCHECK(second.IsConstant());

  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  if (imm == 0) {
    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
  } else if (imm == 1 || imm == -1) {
    DivRemOneOrMinusOne(instruction);
  } else if (IsPowerOfTwo(AbsOrMin(imm))) {
    DivRemByPowerOfTwo(instruction);
  } else {
    DCHECK(imm <= -2 || imm >= 2);
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  if (div->GetResultType() == Primitive::kPrimLong) {
    // pLdiv runtime call.
    call_kind = LocationSummary::kCallOnMainOnly;
  } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
    // sdiv will be replaced by another instruction sequence.
  } else if (div->GetResultType() == Primitive::kPrimInt &&
             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
    // pIdivmod runtime call.
    call_kind = LocationSummary::kCallOnMainOnly;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (div->InputAt(1)->IsConstant()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
        int32_t value = div->InputAt(1)->AsIntConstant()->GetValue();
        if (value == 1 || value == 0 || value == -1) {
          // No temp register required.
        } else {
          locations->AddTemp(Location::RequiresRegister());
          if (!IsPowerOfTwo(AbsOrMin(value))) {
            locations->AddTemp(Location::RequiresRegister());
          }
        }
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::RequiresRegister());
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      } else {
        TODO_VIXL32(FATAL);
      }
      break;
    }
    case Primitive::kPrimLong: {
      TODO_VIXL32(FATAL);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
  Location rhs = div->GetLocations()->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (rhs.IsConstant()) {
        GenerateDivRemConstantIntegral(div);
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
      } else {
        TODO_VIXL32(FATAL);
      }
      break;
    }

    case Primitive::kPrimLong: {
      TODO_VIXL32(FATAL);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  DivZeroCheckSlowPathARMVIXL* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARMVIXL(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegisterPair()) {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        vixl32::Register temp = temps.Acquire();
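        // ORR-ing the two halves with flag setting (ORRS) makes the Z flag
        // indicate whether the full 64-bit divisor is zero, so one
        // conditional branch suffices.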
        __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
        __ B(eq, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}

void LocationsBuilderARMVIXL::HandleFieldSet(
    HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  Primitive::Type field_type = field_info.GetFieldType();
  if (Primitive::IsFloatingPointType(field_type)) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }

  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (needs_write_barrier) {
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // The ARM encoding has some additional constraints for ldrexd/strexd:
    // - the registers need to be consecutive, and
    // - the first register should be even but not R14.
    // We don't test for ARM yet, and the assertion makes sure that we
    // revisit this if we ever enable ARM encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(LocationFrom(r2));
      locations->AddTemp(LocationFrom(r3));
    }
  }
}

void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction,
                                                     const FieldInfo& field_info,
                                                     bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  vixl32::Register base = InputRegisterAt(instruction, 0);
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      GetAssembler()->StoreToOffset(kStoreByte, RegisterFrom(value), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      GetAssembler()->StoreToOffset(kStoreHalfword, RegisterFrom(value), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(field_type, Primitive::kPrimNot);
        vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
        __ Mov(temp, RegisterFrom(value));
        GetAssembler()->PoisonHeapReference(temp);
        GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset);
      } else {
        GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base,
                                offset,
                                LowRegisterFrom(value),
                                HighRegisterFrom(value),
                                RegisterFrom(locations->GetTemp(0)),
                                RegisterFrom(locations->GetTemp(1)),
                                instruction);
      } else {
        GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      vixl32::DRegister value_reg = FromLowSToD(LowSRegisterFrom(value));
      if (is_volatile && !atomic_ldrd_strd) {
        vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0));
        vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1));

        __ Vmov(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base,
                                offset,
                                value_reg_lo,
                                value_reg_hi,
                                RegisterFrom(locations->GetTemp(2)),
                                RegisterFrom(locations->GetTemp(3)),
                                instruction);
      } else {
        GetAssembler()->StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

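  // If a reference was stored, dirty the GC card covering the holder object:
  // a concurrent collector rescans dirtied cards, which is how the newly
  // written reference becomes visible to it. With `value_can_be_null`,
  // MarkGCCard emits a runtime null test so null stores skip the marking.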
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
    vixl32::Register card = RegisterFrom(locations->GetTemp(1));
    codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}

void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
                                             const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_info.GetFieldType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // The output overlaps in case of volatile long: we don't want the
  // code generated by GenerateWideAtomicLoad to overwrite the
  // object's location. Likewise, in the case of an object field get
  // with read barriers enabled, we do not want the load to overwrite
  // the object's location, as we need it to emit the read barrier.
  bool overlap = (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) ||
      object_field_get_with_read_barrier;

  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    locations->SetOut(Location::RequiresRegister(),
                      (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  }
  if (volatile_for_double) {
    // The ARM encoding has some additional constraints for ldrexd/strexd:
    // - the registers need to be consecutive, and
    // - the first register should be even but not R14.
    // We don't test for ARM yet, and the assertion makes sure that we
    // revisit this if we ever enable ARM encoding.
2707 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2708 locations->AddTemp(Location::RequiresRegister());
2709 locations->AddTemp(Location::RequiresRegister());
2710 } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
2711 // We need a temporary register for the read barrier marking slow
2712 // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier.
2713 locations->AddTemp(Location::RequiresRegister());
2714 }
2715}

void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction,
                                                     const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  vixl32::Register base = InputRegisterAt(instruction, 0);
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
      GetAssembler()->LoadFromOffset(kLoadUnsignedByte, RegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimByte:
      GetAssembler()->LoadFromOffset(kLoadSignedByte, RegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimShort:
      GetAssembler()->LoadFromOffset(kLoadSignedHalfword, RegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimChar:
      GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, RegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimInt:
      GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        TODO_VIXL32(FATAL);
      } else {
        GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
        // TODO(VIXL): Scope to guarantee the position immediately after the load.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
      }
      break;
    }

    case Primitive::kPrimLong:
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
      } else {
        GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
      }
      break;

    case Primitive::kPrimFloat:
      GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
      break;

    case Primitive::kPrimDouble: {
      vixl32::DRegister out_dreg = FromLowSToD(LowSRegisterFrom(out));
      if (is_volatile && !atomic_ldrd_strd) {
        vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
        vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
        GenerateWideAtomicLoad(base, offset, lo, hi);
        // TODO(VIXL): Do we need to be immediately after the ldrexd instruction? If so we need a
        // scope.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ Vmov(out_dreg, lo, hi);
      } else {
        GetAssembler()->LoadDFromOffset(out_dreg, base, offset);
        // TODO(VIXL): Scope to guarantee the position immediately after the load.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimDouble) {
    // Potential implicit null checks, in the case of reference or
    // double fields, are handled in the previous switch statement.
  } else {
    // Address cases other than reference and double that may require an implicit null check.
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
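
// For reference, a volatile int/reference field read above lowers to roughly:
//   ldr rX, [base, #offset]
//   dmb ish                  ; the kLoadAny barrier from GenerateMemoryBarrier
// which gives the load acquire semantics. Wide volatile loads on cores without
// atomic ldrd go through the ldrexd path sketched earlier instead.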

void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) {
  // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  UseScratchRegisterScope temps(GetVIXLAssembler());
  AssemblerAccurateScope aas(GetVIXLAssembler(),
                             kArmInstrMaxSizeInBytes,
                             CodeBufferCheckScope::kMaximumSize);
  __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
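
// The ldr above is emitted purely for its side effect: if the object register
// holds null, the load faults and the runtime's fault handler uses the PC
// recorded by RecordPcInfo to throw a NullPointerException, avoiding an
// explicit compare-and-branch. (A note on the mechanism, not code in this file.)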

void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
  NullCheckSlowPathARMVIXL* slow_path =
      new (GetGraph()->GetArena()) NullCheckSlowPathARMVIXL(instruction);
  AddSlowPath(slow_path);
  __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl32::Register obj = InputRegisterAt(instruction, 0);
  vixl32::Register out = OutputRegister(instruction);
  GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // TODO(VIXL): https://android-review.googlesource.com/#/c/272625/
}
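
// GetArrayLengthOffset() is the offset of the 32-bit length_ field in the
// mirror::Array header, so the length read is a single word load that also
// doubles as the implicit null check recorded just above.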

void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp,
                                      vixl32::Register card,
                                      vixl32::Register object,
                                      vixl32::Register value,
                                      bool can_be_null) {
  vixl32::Label is_null;
  if (can_be_null) {
    __ Cbz(value, &is_null);
  }
  GetAssembler()->LoadFromOffset(
      kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  if (can_be_null) {
    __ Bind(&is_null);
  }
}
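
// Sketch of the card-marking math above (one card table byte covers
// 2^kCardShift bytes of heap, per gc::accounting::CardTable):
//   card = *(tr + card_table_offset)    ; biased card table base
//   temp = object >> kCardShift         ; card index for `object`
//   *(card + temp) = (uint8_t)card      ; strb stores the low byte of `card`
// Storing the low byte of the biased base is a runtime trick: the base is
// chosen so that its low byte equals the dirty-card value, which saves
// materializing a separate constant here.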

void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/ and related.
}

void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                           HBasicBlock* successor) {
  SuspendCheckSlowPathARMVIXL* slow_path =
      down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARMVIXL(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  GetAssembler()->LoadFromOffset(
      kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
  }
}
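
// The fast path of the suspend check is just, roughly:
//   ldrh temp, [tr, #thread_flags_offset]  ; Thread state-and-flags halfword
//   cbnz temp, <slow path>                 ; any pending flag -> call runtime
// so the common no-suspend case costs one load and one compare-and-branch.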

ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverARMVIXL::EmitMove(size_t index) {
  UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (destination.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else {
      DCHECK(destination.IsStackSlot());
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsDoubleStackSlot()) {
    TODO_VIXL32(FATAL);
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
      __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
    } else if (destination.IsFpuRegisterPair()) {
      __ Vmov(FromLowSToD(LowSRegisterFrom(destination)),
              LowRegisterFrom(source),
              HighRegisterFrom(source));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      GetAssembler()->StoreToOffset(kStoreWordPair,
                                    LowRegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    TODO_VIXL32(FATAL);
  } else {
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ Mov(RegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, value);
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ Mov(LowRegisterFrom(destination), Low32Bits(value));
        __ Mov(HighRegisterFrom(destination), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ Vmov(FromLowSToD(LowSRegisterFrom(destination)), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, Low32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
        __ Mov(temp, High32Bits(int_value));
        GetAssembler()->StoreToOffset(kStoreWord,
                                      temp,
                                      sp,
                                      destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ Vmov(SRegisterFrom(destination), value);
      } else {
        DCHECK(destination.IsStackSlot());
        vixl32::Register temp = temps.Acquire();
        __ Mov(temp, bit_cast<int32_t, float>(value));
        GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
      }
    }
  }
}
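
// Example lowering (a sketch; the stack offsets are made up): moving the long
// constant 0x100000001 into a double stack slot at [sp, #16] takes one scratch
// register and four instructions:
//   mov temp, #1          ; Low32Bits(value)
//   str temp, [sp, #16]   ; destination.GetStackIndex()
//   mov temp, #1          ; High32Bits(value)
//   str temp, [sp, #20]   ; destination.GetHighStackIndex(kArmWordSize)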

void ParallelMoveResolverARMVIXL::Exchange(Register reg ATTRIBUTE_UNUSED,
                                           int mem ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void ParallelMoveResolverARMVIXL::Exchange(int mem1 ATTRIBUTE_UNUSED,
                                           int mem2 ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void ParallelMoveResolverARMVIXL::EmitSwap(size_t index ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void ParallelMoveResolverARMVIXL::SpillScratch(int reg ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void ParallelMoveResolverARMVIXL::RestoreScratch(int reg ATTRIBUTE_UNUSED) {
  TODO_VIXL32(FATAL);
}

void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(r0),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // TODO(VIXL): read barrier code.
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
      load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  vixl32::Register out = OutputRegister(cls);

  // TODO(VIXL): read barrier code.
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      vixl32::Register current_method = InputRegisterAt(cls, 0);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method,
                              ArtMethod::DeclaringClassOffset().Int32Value());
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //     current_method.ptr_sized_fields_->dex_cache_resolved_types_
      vixl32::Register current_method = InputRegisterAt(cls, 0);
      const int32_t resolved_types_offset =
          ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
      GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex());
      GenerateGcRootFieldLoad(cls, out_loc, out, offset);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      TODO_VIXL32(FATAL);
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    LoadClassSlowPathARMVIXL* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
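
// For the kDexCacheViaMethod kind the fast path above amounts to, roughly:
//   ldr out, [current_method, #resolved_types_offset]  ; GcRoot<Class> array
//   ldr out, [out, #type_index * 4]                    ; the cached Class
//   cbz out, <slow path>          ; unresolved -> runtime resolution; the slow
//                                 ; path also runs <clinit> checks when needed
// (the 4-byte stride reflects compressed heap references; GetCacheOffset is
// the authoritative computation).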

void InstructionCodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
    HInstruction* instruction ATTRIBUTE_UNUSED,
    Location root,
    vixl32::Register obj,
    uint32_t offset,
    bool requires_read_barrier) {
  vixl32::Register root_reg = RegisterFrom(root);
  if (requires_read_barrier) {
    TODO_VIXL32(FATAL);
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

vixl32::Register CodeGeneratorARMVIXL::GetInvokeStaticOrDirectExtraParameter(
    HInvokeStaticOrDirect* invoke, vixl32::Register temp) {
  DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
  if (!invoke->GetLocations()->Intrinsified()) {
    return RegisterFrom(location);
  }
  // For intrinsics we allow any location, so it may be on the stack.
  if (!location.IsRegister()) {
    GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, location.GetStackIndex());
    return temp;
  }
  // For register locations, check if the register was saved. If so, get it from the stack.
  // Note: There is a chance that the register was saved but not overwritten, so we could
  // save one load. However, since this is just an intrinsic slow path we prefer this
  // simple and more robust approach rather than trying to determine if that's the case.
  SlowPathCode* slow_path = GetCurrentSlowPath();
  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
  if (slow_path->IsCoreRegisterSaved(RegisterFrom(location).GetCode())) {
    int stack_offset = slow_path->GetStackOffsetOfCoreRegister(RegisterFrom(location).GetCode());
    GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, stack_offset);
    return temp;
  }
  return RegisterFrom(location);
}

void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp) {
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  vixl32::Register temp_reg = RegisterFrom(temp);

  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, tr, offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      vixl32::Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = RegisterFrom(current_method);
      } else {
        TODO_VIXL32(FATAL);
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      GetAssembler()->LoadFromOffset(
          kLoadWord,
          temp_reg,
          method_reg,
          ArtMethod::DexCacheResolvedMethodsOffset(kArmPointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      GetAssembler()->LoadFromOffset(
          kLoadWord, temp_reg, temp_reg, CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
    default:
      TODO_VIXL32(FATAL);
  }

  // TODO(VIXL): Support `CodePtrLocation` values other than `kCallArtMethod`.
  if (invoke->GetCodePtrLocation() != HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod) {
    TODO_VIXL32(FATAL);
  }

  // LR = callee_method->entry_point_from_quick_compiled_code_
  GetAssembler()->LoadFromOffset(
      kLoadWord,
      lr,
      RegisterFrom(callee_method),
      ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
  // LR()
  __ Blx(lr);

  DCHECK(!IsLeafMethod());
}
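
// Net effect for kDexCacheViaMethod + kCallArtMethod (a sketch, with symbolic
// offsets standing in for the Int32Value() computations above):
//   ldr temp, [method, #dex_cache_resolved_methods]
//   ldr temp, [temp, #index_in_cache * pointer_size]
//   ldr lr,   [temp, #entry_point_from_quick_compiled_code]
//   blx lr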

void CodeGeneratorARMVIXL::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  vixl32::Register temp = RegisterFrom(temp_location);
  uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConventionARMVIXL calling_convention;
  vixl32::Register receiver = calling_convention.GetRegisterAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // /* HeapReference<Class> */ temp = receiver->klass_
  GetAssembler()->LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp);

  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmPointerSize).Int32Value();
  GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
  // LR();
  __ Blx(lr);
}
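
// The virtual dispatch above is, in outline (symbolic offsets again):
//   ldr temp, [receiver, #class_offset]     ; temp = receiver->klass_
//   ldr temp, [temp, #vtable_entry_offset]  ; temp = klass->vtable_[index]
//   ldr lr,   [temp, #entry_point_offset]   ; lr = method's quick code
//   blx lr
// Three dependent loads and an indirect call; the first load doubles as the
// implicit null check on the receiver.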

static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
}

void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
  vixl32::Register out = OutputRegister(load);
  GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
}

void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  __ Mov(temp, 0);
  GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
}

void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction ATTRIBUTE_UNUSED,
                                                        Location out,
                                                        Location ref ATTRIBUTE_UNUSED,
                                                        Location obj ATTRIBUTE_UNUSED,
                                                        uint32_t offset ATTRIBUTE_UNUSED,
                                                        Location index ATTRIBUTE_UNUSED) {
  if (kEmitCompilerReadBarrier) {
    DCHECK(!kUseBakerReadBarrier);
    TODO_VIXL32(FATAL);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
  }
}
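
// With kPoisonHeapReferences enabled, references live in memory in a poisoned
// form and UnpoisonHeapReference recovers the real pointer; in current ART the
// poisoned value is the arithmetic negation of the reference, so unpoisoning
// is a single negate (noted here for context; the assembler owns the scheme).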

#undef __
#undef QUICK_ENTRY_POINT
#undef TODO_VIXL32

}  // namespace arm
}  // namespace art