/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm_vixl.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics_arm_vixl.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {
namespace arm {

namespace vixl32 = vixl::aarch32;
using namespace vixl32;  // NOLINT(build/namespaces)

using helpers::DRegisterFrom;
using helpers::DWARFReg;
using helpers::HighDRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::InputOperandAt;
using helpers::InputRegister;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::InputVRegisterAt;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OutputRegister;
using helpers::OutputSRegister;
using helpers::OutputVRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;

using RegisterList = vixl32::RegisterList;

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and fpu register pairs.
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr size_t kArmInstrMaxSizeInBytes = 4u;
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

#ifdef __
#error "ARM Codegen VIXL macro-assembler macro already defined."
#endif

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

// Marker for code that is yet to be, and must be, implemented.
#define TODO_VIXL32(level) LOG(level) << __PRETTY_FUNCTION__ << " unimplemented "

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARMVIXL operate on sets of S
// registers; for each live D register they treat the two corresponding S registers as live.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build,
// from a list of contiguous S registers, a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list treating them as D registers:
//   - decreasing code size
//   - avoiding hazards on Cortex-A57, when a pair of S registers for an actual live D register is
//     restored and then used in regular non-slow-path code as a D register.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    | v  | v  | v  | v  | v  | v  |    | ...
//
// S1 and S6 will be saved/restored independently; the D register list (D1, D2) will be processed
// as D registers.
//
// TODO(VIXL): All this code should be unnecessary once the VIXL AArch32 backend provides helpers
// for lists of floating-point registers.
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vstr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vstr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ Vstr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vstm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (save_last) {
    __ Vstr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  static_assert(kSRegSizeInBytes == kArmWordSize, "Broken assumption on reg/word sizes.");
  static_assert(kDRegSizeInBytes == 2 * kArmWordSize, "Broken assumption on reg/word sizes.");
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    __ Vldr(vixl32::SRegister(first), MemOperand(sp, stack_offset));
    return stack_offset + kSRegSizeInBytes;
  }
  if (first % 2 == 1) {
    __ Vldr(vixl32::SRegister(first++), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    vixl32::DRegister d_reg = vixl32::DRegister(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ Vldr(d_reg, MemOperand(sp, stack_offset));
    } else if (number_of_d_regs > 1) {
      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
      vixl32::Register base = sp;
      if (stack_offset != 0) {
        base = temps.Acquire();
        __ Add(base, sp, stack_offset);
      }
      __ Vldm(F64, base, NO_WRITE_BACK, DRegisterList(d_reg, number_of_d_regs));
    }
    stack_offset += number_of_d_regs * kDRegSizeInBytes;
  }

  if (restore_last) {
    __ Vldr(vixl32::SRegister(last + 1), MemOperand(sp, stack_offset));
    stack_offset += kSRegSizeInBytes;
  }

  return stack_offset;
}

void SlowPathCodeARMVIXL::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARMVIXL::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  // TODO(VIXL): Check the coherency of stack_offset after this with a test.
  CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
  arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit NullCheckSlowPathARMVIXL(HNullCheck* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARMVIXL);
};

class DivZeroCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DivZeroCheckSlowPathARMVIXL(HDivZeroCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARMVIXL);
};

class SuspendCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  SuspendCheckSlowPathARMVIXL(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARMVIXL(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm_codegen->GetLabelOf(successor_));
    }
  }

  vixl32::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARMVIXL"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl32::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARMVIXL);
};

class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit BoundsCheckSlowPathARMVIXL(HBoundsCheck* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARMVIXL);
};

class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
      : SlowPathCodeARMVIXL(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARMVIXL"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARMVIXL);
};

class TypeCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  TypeCheckSlowPathARMVIXL(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARMVIXL(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConventionARMVIXL calling_convention;

    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      arm_codegen->Move32(locations->Out(), LocationFrom(r0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARMVIXL"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARMVIXL);
};

class DeoptimizationSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit DeoptimizationSlowPathARMVIXL(HDeoptimize* instruction)
      : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARMVIXL);
};

class ArraySetSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit ArraySetSlowPathARMVIXL(HInstruction* instruction) : SlowPathCodeARMVIXL(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConventionARMVIXL calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARMVIXL"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARMVIXL);
};


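// Maps an IfCondition to the corresponding ARM condition code.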
inline vixl32::Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition.
inline vixl32::Condition ARMUnsignedCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    // Signed to unsigned.
    case kCondLT: return lo;
    case kCondLE: return ls;
    case kCondGT: return hi;
    case kCondGE: return hs;
    // Unsigned remain unchanged.
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline vixl32::Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
  // The ARM condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table A8-1 of the ARMv7 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void CodeGeneratorARMVIXL::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << vixl32::Register(reg);
}

void CodeGeneratorARMVIXL::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << vixl32::SRegister(reg);
}

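// Returns the bit mask of S registers covered by the contiguous register list `regs`.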
static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) {
  uint32_t mask = 0;
  for (uint32_t i = regs.GetFirstSRegister().GetCode();
       i <= regs.GetLastSRegister().GetCode();
       ++i) {
    mask |= (1 << i);
  }
  return mask;
}

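// Restores the single S register `reg_id` from the stack slot at `stack_index` and returns the
// number of bytes restored.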
size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  GetAssembler()->LoadSFromOffset(vixl32::SRegister(reg_id), sp, stack_index);
  return kArmWordSize;
}

#undef __

CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
                                           const ArmInstructionSetFeatures& isa_features,
                                           const CompilerOptions& compiler_options,
                                           OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    kCoreCalleeSaves.GetList(),
                    ComputeSRegisterListMask(kFpuCalleeSaves),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features) {
  // Always save the LR register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
  // Give d14 and d15 as scratch registers to VIXL.
  // They are removed from the register allocator in `SetupBlockedRegisters()`.
  // TODO(VIXL): We need two scratch D registers for `EmitSwap` when swapping two double stack
  // slots. If that is sufficiently rare, and we have pressure on FP registers, we could instead
  // spill in `EmitSwap`. But if we actually are guaranteed to have 32 D registers, we could give
  // d30 and d31 to VIXL to avoid removing registers from the allocator. If that is the case, we
  // may also want to investigate giving those 14 other D registers to the allocator.
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d14);
  GetVIXLAssembler()->GetScratchVRegisterList()->Combine(d15);
}

void JumpTableARMVIXL::EmitTable(CodeGeneratorARMVIXL* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  codegen->GetVIXLAssembler()->GetBuffer().Align();
  AssemblerAccurateScope aas(codegen->GetVIXLAssembler(),
                             num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kMaximumSize);
  // TODO(VIXL): Check that using lower case bind is fine here.
  codegen->GetVIXLAssembler()->bind(&table_start_);
  for (uint32_t i = 0; i < num_entries; i++) {
    codegen->GetVIXLAssembler()->place(bb_addresses_[i].get());
  }
}

void JumpTableARMVIXL::FixTable(CodeGeneratorARMVIXL* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl32::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    int32_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    // When doing a BX to an address, the lower bit must be set to 1 in T32.
    if (codegen->GetVIXLAssembler()->IsUsingT32()) {
      jump_offset++;
    }
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());

    bb_addresses_[i].get()->UpdateValue(jump_offset, &codegen->GetVIXLAssembler()->GetBuffer());
  }
}

void CodeGeneratorARMVIXL::FixJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->FixTable(this);
  }
}

#define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->  // NOLINT

void CodeGeneratorARMVIXL::Finalize(CodeAllocator* allocator) {
  FixJumpTables();
  GetAssembler()->FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void CodeGeneratorARMVIXL::SetupBlockedRegisters() const {
  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // Registers s28-s31 (d14-d15) are left to VIXL for scratch registers.
  // (They are given to the `MacroAssembler` in `CodeGeneratorARMVIXL::CodeGeneratorARMVIXL`.)
  blocked_fpu_registers_[28] = true;
  blocked_fpu_registers_[29] = true;
  blocked_fpu_registers_[30] = true;
  blocked_fpu_registers_[31] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (uint32_t i = kFpuCalleeSaves.GetFirstSRegister().GetCode();
         i <= kFpuCalleeSaves.GetLastSRegister().GetCode();
         ++i) {
      blocked_fpu_registers_[i] = true;
    }
  }
}

InstructionCodeGeneratorARMVIXL::InstructionCodeGeneratorARMVIXL(HGraph* graph,
                                                                 CodeGeneratorARMVIXL* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void CodeGeneratorARMVIXL::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  // There is no easy instruction to restore just the PC on thumb2. We spill and
  // restore another arbitrary register.
  core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister.GetCode());
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}

void CodeGeneratorARMVIXL::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    vixl32::Register temp = temps.Acquire();
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    // The load must immediately precede RecordPcInfo.
    AssemblerAccurateScope aas(GetVIXLAssembler(),
                               kArmInstrMaxSizeInBytes,
                               CodeBufferCheckScope::kMaximumSize);
    __ ldr(temp, MemOperand(temp));
    RecordPcInfo(nullptr, 0);
  }

  __ Push(RegisterList(core_spill_mask_));
  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
  GetAssembler()->cfi().RelOffsetForMany(DWARFReg(kMethodRegister),
                                         0,
                                         core_spill_mask_,
                                         kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpush(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RelOffsetForMany(DWARFReg(s0), 0, fpu_spill_mask_, kArmWordSize);
  }
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Sub(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(adjust);
  GetAssembler()->StoreToOffset(kStoreWord, kMethodRegister, sp, 0);
}

void CodeGeneratorARMVIXL::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ Bx(lr);
    return;
  }
  GetAssembler()->cfi().RememberState();
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ Add(sp, sp, adjust);
  GetAssembler()->cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    uint32_t first = LeastSignificantBit(fpu_spill_mask_);

    // Check that list is contiguous.
    DCHECK_EQ(fpu_spill_mask_ >> CTZ(fpu_spill_mask_), ~0u >> (32 - POPCOUNT(fpu_spill_mask_)));

    __ Vpop(SRegisterList(vixl32::SRegister(first), POPCOUNT(fpu_spill_mask_)));
    GetAssembler()->cfi().AdjustCFAOffset(
        -static_cast<int>(kArmWordSize) * POPCOUNT(fpu_spill_mask_));
    GetAssembler()->cfi().RestoreMany(DWARFReg(vixl32::SRegister(0)), fpu_spill_mask_);
  }
  // Pop LR into PC to return.
  DCHECK_NE(core_spill_mask_ & (1 << kLrCode), 0U);
  uint32_t pop_mask = (core_spill_mask_ & (~(1 << kLrCode))) | 1 << kPcCode;
  __ Pop(RegisterList(pop_mask));
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorARMVIXL::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

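// Moves a 32-bit value between two locations (core register, S register or stack slot), using a
// scratch core register for stack-to-stack moves.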
void CodeGeneratorARMVIXL::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(RegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadFromOffset(kLoadWord,
                                     RegisterFrom(destination),
                                     sp,
                                     source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
    } else if (source.IsFpuRegister()) {
      __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
    } else {
      GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      GetAssembler()->StoreToOffset(kStoreWord,
                                    RegisterFrom(source),
                                    sp,
                                    destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      vixl32::Register temp = temps.Acquire();
      GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
      GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
    }
  }
}

void CodeGeneratorARMVIXL::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location), value);
}

void CodeGeneratorARMVIXL::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
  // TODO(VIXL): Maybe refactor to have the 'move' implementation here and use it in
  // `ParallelMoveResolverARMVIXL::EmitMove`, as is done in the `arm64` backend.
  HParallelMove move(GetGraph()->GetArena());
  move.AddMove(src, dst, dst_type, nullptr);
  GetMoveResolver()->EmitNativeCode(&move);
}

void CodeGeneratorARMVIXL::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else if (location.IsRegisterPair()) {
    locations->AddTemp(LocationFrom(LowRegisterFrom(location)));
    locations->AddTemp(LocationFrom(HighRegisterFrom(location)));
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARMVIXL::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
    // previous instruction.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorARMVIXL::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                               HInstruction* instruction,
                                                               SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorARMVIXL::GenerateInvokeRuntime(int32_t entry_point_offset) {
  GetAssembler()->LoadFromOffset(kLoadWord, lr, tr, entry_point_offset);
  __ Blx(lr);
}

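// Handles an unconditional jump to `successor`, generating a suspend check where required (loop
// back edges and jumps right after the entry block's suspend check) and omitting the branch when
// `successor` is the next block in the code layout.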
void InstructionCodeGeneratorARMVIXL::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARMVIXL::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void LocationsBuilderARMVIXL::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

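// Emits a VCMP comparing the two floating-point inputs of `instruction`; when the right-hand side
// is a zero constant, the immediate 0.0 form of VCMP is used.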
void InstructionCodeGeneratorARMVIXL::GenerateVcmp(HInstruction* instruction) {
  Primitive::Type type = instruction->InputAt(0)->GetType();
  Location lhs_loc = instruction->GetLocations()->InAt(0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // a VCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(F32, InputSRegisterAt(instruction, 0), 0.0);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(F64, DRegisterFrom(lhs_loc), 0.0);
    }
  } else {
    if (type == Primitive::kPrimFloat) {
      __ Vcmp(InputSRegisterAt(instruction, 0), InputSRegisterAt(instruction, 1));
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ Vcmp(DRegisterFrom(lhs_loc), DRegisterFrom(rhs_loc));
    }
  }
}

void InstructionCodeGeneratorARMVIXL::GenerateFPJumps(HCondition* cond,
                                                      vixl32::Label* true_label,
                                                      vixl32::Label* false_label ATTRIBUTE_UNUSED) {
  // To branch on the result of the FP compare we transfer FPSCR to APSR (encoded as PC in VMRS).
  __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
  __ B(ARMFPCondition(cond->GetCondition(), cond->IsGtBias()), true_label);
}

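// Compares two 64-bit values held in register pairs (the right-hand side may also be a constant)
// by first comparing the high words and, when they are equal, comparing the low words with an
// unsigned condition.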
void InstructionCodeGeneratorARMVIXL::GenerateLongComparesAndJumps(HCondition* cond,
                                                                   vixl32::Label* true_label,
                                                                   vixl32::Label* false_label) {
  LocationSummary* locations = cond->GetLocations();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  IfCondition if_cond = cond->GetCondition();

  vixl32::Register left_high = HighRegisterFrom(left);
  vixl32::Register left_low = LowRegisterFrom(left);
  IfCondition true_high_cond = if_cond;
  IfCondition false_high_cond = cond->GetOppositeCondition();
  vixl32::Condition final_condition = ARMUnsignedCondition(if_cond);  // unsigned on lower part

  // Set the conditions for the test, remembering that == needs to be
  // decided using the low words.
  // TODO: consider avoiding jumps with temporary and CMP low+SBC high
  switch (if_cond) {
    case kCondEQ:
    case kCondNE:
      // Nothing to do.
      break;
    case kCondLT:
      false_high_cond = kCondGT;
      break;
    case kCondLE:
      true_high_cond = kCondLT;
      break;
    case kCondGT:
      false_high_cond = kCondLT;
      break;
    case kCondGE:
      true_high_cond = kCondGT;
      break;
    case kCondB:
      false_high_cond = kCondA;
      break;
    case kCondBE:
      true_high_cond = kCondB;
      break;
    case kCondA:
      false_high_cond = kCondB;
      break;
    case kCondAE:
      true_high_cond = kCondA;
      break;
  }
  if (right.IsConstant()) {
    int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
    int32_t val_low = Low32Bits(value);
    int32_t val_high = High32Bits(value);

    __ Cmp(left_high, val_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, val_low);
  } else {
    vixl32::Register right_high = HighRegisterFrom(right);
    vixl32::Register right_low = LowRegisterFrom(right);

    __ Cmp(left_high, right_high);
    if (if_cond == kCondNE) {
      __ B(ARMCondition(true_high_cond), true_label);
    } else if (if_cond == kCondEQ) {
      __ B(ARMCondition(false_high_cond), false_label);
    } else {
      __ B(ARMCondition(true_high_cond), true_label);
      __ B(ARMCondition(false_high_cond), false_label);
    }
    // Must be equal high, so compare the lows.
    __ Cmp(left_low, right_low);
  }
  // The last comparison might be unsigned.
  // TODO: optimize cases where this is always true/false
  __ B(final_condition, true_label);
}

void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
                                                                   vixl32::Label* true_target_in,
                                                                   vixl32::Label* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough` instead.
  vixl32::Label fallthrough;
  vixl32::Label* true_target = (true_target_in == nullptr) ? &fallthrough : true_target_in;
  vixl32::Label* false_target = (false_target_in == nullptr) ? &fallthrough : false_target_in;

  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong:
      GenerateLongComparesAndJumps(condition, true_target, false_target);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateVcmp(condition);
      GenerateFPJumps(condition, true_target, false_target);
      break;
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  if (false_target != &fallthrough) {
    __ B(false_target);
  }

  if (true_target_in == nullptr || false_target_in == nullptr) {
    __ Bind(&fallthrough);
  }
}

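// Emits the branches for `instruction` based on its condition input at `condition_input_index`;
// a null `true_target` or `false_target` means the corresponding successor is reached by falling
// through.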
1152void InstructionCodeGeneratorARMVIXL::GenerateTestAndBranch(HInstruction* instruction,
1153 size_t condition_input_index,
1154 vixl32::Label* true_target,
1155 vixl32::Label* false_target) {
1156 HInstruction* cond = instruction->InputAt(condition_input_index);
1157
1158 if (true_target == nullptr && false_target == nullptr) {
1159 // Nothing to do. The code always falls through.
1160 return;
1161 } else if (cond->IsIntConstant()) {
1162 // Constant condition, statically compared against "true" (integer value 1).
1163 if (cond->AsIntConstant()->IsTrue()) {
1164 if (true_target != nullptr) {
1165 __ B(true_target);
1166 }
1167 } else {
1168 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
1169 if (false_target != nullptr) {
1170 __ B(false_target);
1171 }
1172 }
1173 return;
1174 }
1175
1176 // The following code generates these patterns:
1177 // (1) true_target == nullptr && false_target != nullptr
1178 // - opposite condition true => branch to false_target
1179 // (2) true_target != nullptr && false_target == nullptr
1180 // - condition true => branch to true_target
1181 // (3) true_target != nullptr && false_target != nullptr
1182 // - condition true => branch to true_target
1183 // - branch to false_target
1184 if (IsBooleanValueOrMaterializedCondition(cond)) {
1185 // Condition has been materialized, compare the output to 0.
1186 if (kIsDebugBuild) {
1187 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
1188 DCHECK(cond_val.IsRegister());
1189 }
1190 if (true_target == nullptr) {
1191 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
1192 } else {
1193 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
1194 }
1195 } else {
1196 // Condition has not been materialized. Use its inputs as the comparison and
1197 // its condition as the branch condition.
1198 HCondition* condition = cond->AsCondition();
1199
1200 // If this is a long or FP comparison that has been folded into
1201 // the HCondition, generate the comparison directly.
1202 Primitive::Type type = condition->InputAt(0)->GetType();
1203 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1204 GenerateCompareTestAndBranch(condition, true_target, false_target);
1205 return;
1206 }
1207
1208 LocationSummary* locations = cond->GetLocations();
1209 DCHECK(locations->InAt(0).IsRegister());
1210 vixl32::Register left = InputRegisterAt(cond, 0);
1211 Location right = locations->InAt(1);
1212 if (right.IsRegister()) {
1213 __ Cmp(left, InputRegisterAt(cond, 1));
1214 } else {
1215 DCHECK(right.IsConstant());
1216 __ Cmp(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
1217 }
1218 if (true_target == nullptr) {
1219 __ B(ARMCondition(condition->GetOppositeCondition()), false_target);
1220 } else {
1221 __ B(ARMCondition(condition->GetCondition()), true_target);
1222 }
1223 }
1224
1225 // If neither branch falls through (case 3), the conditional branch to `true_target`
1226 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1227 if (true_target != nullptr && false_target != nullptr) {
1228 __ B(false_target);
1229 }
1230}
1231
1232void LocationsBuilderARMVIXL::VisitIf(HIf* if_instr) {
1233 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1234 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
1235 locations->SetInAt(0, Location::RequiresRegister());
1236 }
1237}
1238
1239void InstructionCodeGeneratorARMVIXL::VisitIf(HIf* if_instr) {
1240 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1241 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001242 vixl32::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1243 nullptr : codegen_->GetLabelOf(true_successor);
1244 vixl32::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1245 nullptr : codegen_->GetLabelOf(false_successor);
Scott Wakelingfe885462016-09-22 10:24:38 +01001246 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
1247}
1248
Scott Wakelingc34dba72016-10-03 10:14:44 +01001249void LocationsBuilderARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
1250 LocationSummary* locations = new (GetGraph()->GetArena())
1251 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
1252 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
1253 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
1254 locations->SetInAt(0, Location::RequiresRegister());
1255 }
1256}
1257
1258void InstructionCodeGeneratorARMVIXL::VisitDeoptimize(HDeoptimize* deoptimize) {
1259 SlowPathCodeARMVIXL* slow_path =
1260 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARMVIXL>(deoptimize);
1261 GenerateTestAndBranch(deoptimize,
1262 /* condition_input_index */ 0,
1263 slow_path->GetEntryLabel(),
1264 /* false_target */ nullptr);
1265}
1266
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001267void LocationsBuilderARMVIXL::VisitSelect(HSelect* select) {
1268 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1269 if (Primitive::IsFloatingPointType(select->GetType())) {
1270 locations->SetInAt(0, Location::RequiresFpuRegister());
1271 locations->SetInAt(1, Location::RequiresFpuRegister());
1272 } else {
1273 locations->SetInAt(0, Location::RequiresRegister());
1274 locations->SetInAt(1, Location::RequiresRegister());
1275 }
1276 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1277 locations->SetInAt(2, Location::RequiresRegister());
1278 }
1279 locations->SetOut(Location::SameAsFirstInput());
1280}
1281
1282void InstructionCodeGeneratorARMVIXL::VisitSelect(HSelect* select) {
1283 LocationSummary* locations = select->GetLocations();
1284 vixl32::Label false_target;
1285 GenerateTestAndBranch(select,
1286 /* condition_input_index */ 2,
1287 /* true_target */ nullptr,
1288 &false_target);
1289 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1290 __ Bind(&false_target);
1291}
1292
Artem Serov551b28f2016-10-18 19:11:30 +01001293void LocationsBuilderARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1294 new (GetGraph()->GetArena()) LocationSummary(info);
1295}
1296
1297void InstructionCodeGeneratorARMVIXL::VisitNativeDebugInfo(HNativeDebugInfo*) {
1298 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
1299}
1300
Scott Wakelingfe885462016-09-22 10:24:38 +01001301void CodeGeneratorARMVIXL::GenerateNop() {
1302 __ Nop();
1303}
1304
1305void LocationsBuilderARMVIXL::HandleCondition(HCondition* cond) {
1306 LocationSummary* locations =
1307 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
1308 // Handle the long/FP comparisons made in instruction simplification.
1309 switch (cond->InputAt(0)->GetType()) {
1310 case Primitive::kPrimLong:
1311 locations->SetInAt(0, Location::RequiresRegister());
1312 locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
1313 if (!cond->IsEmittedAtUseSite()) {
1314 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1315 }
1316 break;
1317
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001318 // TODO(VIXL): https://android-review.googlesource.com/#/c/252265/
Scott Wakelingfe885462016-09-22 10:24:38 +01001319 case Primitive::kPrimFloat:
1320 case Primitive::kPrimDouble:
1321 locations->SetInAt(0, Location::RequiresFpuRegister());
1322 locations->SetInAt(1, Location::RequiresFpuRegister());
1323 if (!cond->IsEmittedAtUseSite()) {
1324 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1325 }
1326 break;
1327
1328 default:
1329 locations->SetInAt(0, Location::RequiresRegister());
1330 locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
1331 if (!cond->IsEmittedAtUseSite()) {
1332 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1333 }
1334 }
1335}
1336
1337void InstructionCodeGeneratorARMVIXL::HandleCondition(HCondition* cond) {
1338 if (cond->IsEmittedAtUseSite()) {
1339 return;
1340 }
1341
Scott Wakelingfe885462016-09-22 10:24:38 +01001342 vixl32::Register out = OutputRegister(cond);
1343 vixl32::Label true_label, false_label;
1344
1345 switch (cond->InputAt(0)->GetType()) {
1346 default: {
1347 // Integer case.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001348 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
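      // Materialize the boolean result with an IT (If-Then-Else) block: the first
      // conditional MOV sets the output to 1 when the condition holds, the second sets
      // it to 0 otherwise. The scope reserves space for these three Thumb-2 instructions.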
1349 AssemblerAccurateScope aas(GetVIXLAssembler(),
1350 kArmInstrMaxSizeInBytes * 3u,
1351 CodeBufferCheckScope::kMaximumSize);
1352 __ ite(ARMCondition(cond->GetCondition()));
1353 __ mov(ARMCondition(cond->GetCondition()), OutputRegister(cond), 1);
1354 __ mov(ARMCondition(cond->GetOppositeCondition()), OutputRegister(cond), 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01001355 return;
1356 }
1357 case Primitive::kPrimLong:
1358 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1359 break;
1360 case Primitive::kPrimFloat:
1361 case Primitive::kPrimDouble:
1362 GenerateVcmp(cond);
1363 GenerateFPJumps(cond, &true_label, &false_label);
1364 break;
1365 }
1366
1367 // Convert the jumps into the result.
1368 vixl32::Label done_label;
1369
1370 // False case: result = 0.
1371 __ Bind(&false_label);
1372 __ Mov(out, 0);
1373 __ B(&done_label);
1374
1375 // True case: result = 1.
1376 __ Bind(&true_label);
1377 __ Mov(out, 1);
1378 __ Bind(&done_label);
1379}
1380
1381void LocationsBuilderARMVIXL::VisitEqual(HEqual* comp) {
1382 HandleCondition(comp);
1383}
1384
1385void InstructionCodeGeneratorARMVIXL::VisitEqual(HEqual* comp) {
1386 HandleCondition(comp);
1387}
1388
1389void LocationsBuilderARMVIXL::VisitNotEqual(HNotEqual* comp) {
1390 HandleCondition(comp);
1391}
1392
1393void InstructionCodeGeneratorARMVIXL::VisitNotEqual(HNotEqual* comp) {
1394 HandleCondition(comp);
1395}
1396
1397void LocationsBuilderARMVIXL::VisitLessThan(HLessThan* comp) {
1398 HandleCondition(comp);
1399}
1400
1401void InstructionCodeGeneratorARMVIXL::VisitLessThan(HLessThan* comp) {
1402 HandleCondition(comp);
1403}
1404
1405void LocationsBuilderARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1406 HandleCondition(comp);
1407}
1408
1409void InstructionCodeGeneratorARMVIXL::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1410 HandleCondition(comp);
1411}
1412
1413void LocationsBuilderARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
1414 HandleCondition(comp);
1415}
1416
1417void InstructionCodeGeneratorARMVIXL::VisitGreaterThan(HGreaterThan* comp) {
1418 HandleCondition(comp);
1419}
1420
1421void LocationsBuilderARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1422 HandleCondition(comp);
1423}
1424
1425void InstructionCodeGeneratorARMVIXL::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1426 HandleCondition(comp);
1427}
1428
1429void LocationsBuilderARMVIXL::VisitBelow(HBelow* comp) {
1430 HandleCondition(comp);
1431}
1432
1433void InstructionCodeGeneratorARMVIXL::VisitBelow(HBelow* comp) {
1434 HandleCondition(comp);
1435}
1436
1437void LocationsBuilderARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
1438 HandleCondition(comp);
1439}
1440
1441void InstructionCodeGeneratorARMVIXL::VisitBelowOrEqual(HBelowOrEqual* comp) {
1442 HandleCondition(comp);
1443}
1444
1445void LocationsBuilderARMVIXL::VisitAbove(HAbove* comp) {
1446 HandleCondition(comp);
1447}
1448
1449void InstructionCodeGeneratorARMVIXL::VisitAbove(HAbove* comp) {
1450 HandleCondition(comp);
1451}
1452
1453void LocationsBuilderARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
1454 HandleCondition(comp);
1455}
1456
1457void InstructionCodeGeneratorARMVIXL::VisitAboveOrEqual(HAboveOrEqual* comp) {
1458 HandleCondition(comp);
1459}
1460
1461void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) {
1462 LocationSummary* locations =
1463 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1464 locations->SetOut(Location::ConstantLocation(constant));
1465}
1466
1467void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
1468 // Will be generated at use site.
1469}
1470
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001471void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) {
1472 LocationSummary* locations =
1473 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1474 locations->SetOut(Location::ConstantLocation(constant));
1475}
1476
1477void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
1478 // Will be generated at use site.
1479}
1480
Scott Wakelingfe885462016-09-22 10:24:38 +01001481void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) {
1482 LocationSummary* locations =
1483 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1484 locations->SetOut(Location::ConstantLocation(constant));
1485}
1486
1487void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
1488 // Will be generated at use site.
1489}
1490
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01001491void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) {
1492 LocationSummary* locations =
1493 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1494 locations->SetOut(Location::ConstantLocation(constant));
1495}
1496
Scott Wakelingc34dba72016-10-03 10:14:44 +01001497void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(
1498 HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01001499 // Will be generated at use site.
1500}
1501
1502void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) {
1503 LocationSummary* locations =
1504 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1505 locations->SetOut(Location::ConstantLocation(constant));
1506}
1507
Scott Wakelingc34dba72016-10-03 10:14:44 +01001508void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(
1509 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01001510 // Will be generated at use site.
1511}
1512
Scott Wakelingfe885462016-09-22 10:24:38 +01001513void LocationsBuilderARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1514 memory_barrier->SetLocations(nullptr);
1515}
1516
1517void InstructionCodeGeneratorARMVIXL::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1518 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
1519}
1520
1521void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) {
1522 ret->SetLocations(nullptr);
1523}
1524
1525void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
1526 codegen_->GenerateFrameExit();
1527}
1528
1529void LocationsBuilderARMVIXL::VisitReturn(HReturn* ret) {
1530 LocationSummary* locations =
1531 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1532 locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1533}
1534
1535void InstructionCodeGeneratorARMVIXL::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
1536 codegen_->GenerateFrameExit();
1537}
1538
Artem Serovcfbe9132016-10-14 15:58:56 +01001539void LocationsBuilderARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
1540 // The trampoline uses the same calling convention as the dex calling convention,
1541 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
1542 // the method_idx.
1543 HandleInvoke(invoke);
1544}
1545
1546void InstructionCodeGeneratorARMVIXL::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
1547 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
1548}
1549
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001550void LocationsBuilderARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1551 // Explicit clinit checks triggered by static invokes must have been pruned by
1552 // art::PrepareForRegisterAllocation.
1553 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
1554
Anton Kirilov5ec62182016-10-13 20:16:02 +01001555 IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
1556 if (intrinsic.TryDispatch(invoke)) {
1557 if (invoke->GetLocations()->CanCall() && invoke->HasPcRelativeDexCache()) {
1558 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
1559 }
1560 return;
1561 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001562
1563 HandleInvoke(invoke);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01001564
1565 // TODO(VIXL): invoke->HasPcRelativeDexCache()
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001566}
1567
Anton Kirilov5ec62182016-10-13 20:16:02 +01001568static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARMVIXL* codegen) {
1569 if (invoke->GetLocations()->Intrinsified()) {
1570 IntrinsicCodeGeneratorARMVIXL intrinsic(codegen);
1571 intrinsic.Dispatch(invoke);
1572 return true;
1573 }
1574 return false;
1575}
1576
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001577void InstructionCodeGeneratorARMVIXL::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1578 // Explicit clinit checks triggered by static invokes must have been pruned by
1579 // art::PrepareForRegisterAllocation.
1580 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
1581
Anton Kirilov5ec62182016-10-13 20:16:02 +01001582 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1583 return;
1584 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001585
1586 LocationSummary* locations = invoke->GetLocations();
1587 DCHECK(locations->HasTemps());
1588 codegen_->GenerateStaticOrDirectCall(invoke, locations->GetTemp(0));
1589 // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
1590 // previous instruction.
1591 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1592}
1593
1594void LocationsBuilderARMVIXL::HandleInvoke(HInvoke* invoke) {
1595 InvokeDexCallingConventionVisitorARM calling_convention_visitor;
1596 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
1597}
1598
1599void LocationsBuilderARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01001600 IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
1601 if (intrinsic.TryDispatch(invoke)) {
1602 return;
1603 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001604
1605 HandleInvoke(invoke);
1606}
1607
1608void InstructionCodeGeneratorARMVIXL::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01001609 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1610 return;
1611 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001612
1613 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
1614 DCHECK(!codegen_->IsLeafMethod());
1615 // TODO(VIXL): If necessary, use a scope to ensure we record the pc info immediately after the
1616 // previous instruction.
1617 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1618}
1619
Artem Serovcfbe9132016-10-14 15:58:56 +01001620void LocationsBuilderARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
1621 HandleInvoke(invoke);
1622 // Add the hidden argument.
1623 invoke->GetLocations()->AddTemp(LocationFrom(r12));
1624}
1625
1626void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* invoke) {
1627 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1628 LocationSummary* locations = invoke->GetLocations();
1629 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
1630 vixl32::Register hidden_reg = RegisterFrom(locations->GetTemp(1));
1631 Location receiver = locations->InAt(0);
1632 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1633
1634 DCHECK(!receiver.IsStackSlot());
1635
1636 // /* HeapReference<Class> */ temp = receiver->klass_
1637 GetAssembler()->LoadFromOffset(kLoadWord, temp, RegisterFrom(receiver), class_offset);
1638
1639 codegen_->MaybeRecordImplicitNullCheck(invoke);
1640 // Instead of simply (possibly) unpoisoning `temp` here, we should
1641 // emit a read barrier for the previous class reference load.
1642 // However this is not required in practice, as this is an
1643 // intermediate/temporary reference and because the current
1644 // concurrent copying collector keeps the from-space memory
1645 // intact/accessible until the end of the marking phase (future
1646 // concurrent copying collectors may not provide this guarantee).
1647 GetAssembler()->MaybeUnpoisonHeapReference(temp);
1648 GetAssembler()->LoadFromOffset(kLoadWord,
1649 temp,
1650 temp,
1651 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
1652 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
1653 invoke->GetImtIndex(), kArmPointerSize));
1654 // temp = temp->GetImtEntryAt(method_offset);
1655 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
1656 uint32_t entry_point =
1657 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value();
1658 // LR = temp->GetEntryPoint();
1659 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
1660
1661 // Set the hidden argument (in r12). It is done here, right before a BLX, to prevent other
1662 // instructions from clobbering it, as they might use r12 as a scratch register.
1663 DCHECK(hidden_reg.Is(r12));
1664 __ Mov(hidden_reg, invoke->GetDexMethodIndex());
1665
1666 {
1667 AssemblerAccurateScope aas(GetVIXLAssembler(),
1668 kArmInstrMaxSizeInBytes,
1669 CodeBufferCheckScope::kMaximumSize);
1670 // Call the entry point held in LR, i.e. LR().
1671 __ blx(lr);
1672 DCHECK(!codegen_->IsLeafMethod());
1673 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1674 }
1675}
1676
Artem Serov02109dd2016-09-23 17:17:54 +01001677void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
1678 LocationSummary* locations =
1679 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1680 switch (neg->GetResultType()) {
1681 case Primitive::kPrimInt: {
1682 locations->SetInAt(0, Location::RequiresRegister());
1683 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1684 break;
1685 }
1686 case Primitive::kPrimLong: {
1687 locations->SetInAt(0, Location::RequiresRegister());
1688 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1689 break;
1690 }
1691
1692 case Primitive::kPrimFloat:
1693 case Primitive::kPrimDouble:
1694 locations->SetInAt(0, Location::RequiresFpuRegister());
1695 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1696 break;
1697
1698 default:
1699 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1700 }
1701}
1702
1703void InstructionCodeGeneratorARMVIXL::VisitNeg(HNeg* neg) {
1704 LocationSummary* locations = neg->GetLocations();
1705 Location out = locations->Out();
1706 Location in = locations->InAt(0);
1707 switch (neg->GetResultType()) {
1708 case Primitive::kPrimInt:
1709 __ Rsb(OutputRegister(neg), InputRegisterAt(neg, 0), 0);
1710 break;
1711
1712 case Primitive::kPrimLong:
1713 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1714 __ Rsbs(LowRegisterFrom(out), LowRegisterFrom(in), 0);
1715 // We cannot emit an RSC (Reverse Subtract with Carry)
1716 // instruction here, as it does not exist in the Thumb-2
1717 // instruction set, so we use the following approach
1718 // with SBC and SUB instead.
1719 //
1720 // out.hi = -C
1721 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(out));
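      // SBC with identical operands yields 0 when the RSBS above produced no borrow
      // (i.e. in.lo == 0) and -1 when it did, so out.hi now holds -borrow.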
1722 // out.hi = out.hi - in.hi
1723 __ Sub(HighRegisterFrom(out), HighRegisterFrom(out), HighRegisterFrom(in));
1724 break;
1725
1726 case Primitive::kPrimFloat:
1727 case Primitive::kPrimDouble:
Anton Kirilove28d9ae2016-10-25 18:17:23 +01001728 // TODO(VIXL): Consider introducing an InputVRegister()
1729 // helper function (equivalent to InputRegister()).
Artem Serov02109dd2016-09-23 17:17:54 +01001730 __ Vneg(OutputVRegister(neg), InputVRegisterAt(neg, 0));
1731 break;
1732
1733 default:
1734 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1735 }
1736}
1737
Scott Wakelingfe885462016-09-22 10:24:38 +01001738void LocationsBuilderARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
1739 Primitive::Type result_type = conversion->GetResultType();
1740 Primitive::Type input_type = conversion->GetInputType();
1741 DCHECK_NE(result_type, input_type);
1742
1743 // The float-to-long, double-to-long and long-to-float type conversions
1744 // rely on a call to the runtime.
1745 LocationSummary::CallKind call_kind =
1746 (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1747 && result_type == Primitive::kPrimLong)
1748 || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
1749 ? LocationSummary::kCallOnMainOnly
1750 : LocationSummary::kNoCall;
1751 LocationSummary* locations =
1752 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1753
1754 // The Java language does not allow treating boolean as an integral type but
1755 // our bit representation makes it safe.
1756
1757 switch (result_type) {
1758 case Primitive::kPrimByte:
1759 switch (input_type) {
1760 case Primitive::kPrimLong:
1761 // Type conversion from long to byte is a result of code transformations.
1762 case Primitive::kPrimBoolean:
1763 // Boolean input is a result of code transformations.
1764 case Primitive::kPrimShort:
1765 case Primitive::kPrimInt:
1766 case Primitive::kPrimChar:
1767 // Processing a Dex `int-to-byte' instruction.
1768 locations->SetInAt(0, Location::RequiresRegister());
1769 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1770 break;
1771
1772 default:
1773 LOG(FATAL) << "Unexpected type conversion from " << input_type
1774 << " to " << result_type;
1775 }
1776 break;
1777
1778 case Primitive::kPrimShort:
1779 switch (input_type) {
1780 case Primitive::kPrimLong:
1781 // Type conversion from long to short is a result of code transformations.
1782 case Primitive::kPrimBoolean:
1783 // Boolean input is a result of code transformations.
1784 case Primitive::kPrimByte:
1785 case Primitive::kPrimInt:
1786 case Primitive::kPrimChar:
1787 // Processing a Dex `int-to-short' instruction.
1788 locations->SetInAt(0, Location::RequiresRegister());
1789 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1790 break;
1791
1792 default:
1793 LOG(FATAL) << "Unexpected type conversion from " << input_type
1794 << " to " << result_type;
1795 }
1796 break;
1797
1798 case Primitive::kPrimInt:
1799 switch (input_type) {
1800 case Primitive::kPrimLong:
1801 // Processing a Dex `long-to-int' instruction.
1802 locations->SetInAt(0, Location::Any());
1803 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1804 break;
1805
1806 case Primitive::kPrimFloat:
1807 // Processing a Dex `float-to-int' instruction.
1808 locations->SetInAt(0, Location::RequiresFpuRegister());
1809 locations->SetOut(Location::RequiresRegister());
1810 locations->AddTemp(Location::RequiresFpuRegister());
1811 break;
1812
1813 case Primitive::kPrimDouble:
1814 // Processing a Dex `double-to-int' instruction.
1815 locations->SetInAt(0, Location::RequiresFpuRegister());
1816 locations->SetOut(Location::RequiresRegister());
1817 locations->AddTemp(Location::RequiresFpuRegister());
1818 break;
1819
1820 default:
1821 LOG(FATAL) << "Unexpected type conversion from " << input_type
1822 << " to " << result_type;
1823 }
1824 break;
1825
1826 case Primitive::kPrimLong:
1827 switch (input_type) {
1828 case Primitive::kPrimBoolean:
1829 // Boolean input is a result of code transformations.
1830 case Primitive::kPrimByte:
1831 case Primitive::kPrimShort:
1832 case Primitive::kPrimInt:
1833 case Primitive::kPrimChar:
1834 // Processing a Dex `int-to-long' instruction.
1835 locations->SetInAt(0, Location::RequiresRegister());
1836 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1837 break;
1838
1839 case Primitive::kPrimFloat: {
1840 // Processing a Dex `float-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001841 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1842 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
1843 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001844 break;
1845 }
1846
1847 case Primitive::kPrimDouble: {
1848 // Processing a Dex `double-to-long' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001849 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1850 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0),
1851 calling_convention.GetFpuRegisterAt(1)));
1852 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01001853 break;
1854 }
1855
1856 default:
1857 LOG(FATAL) << "Unexpected type conversion from " << input_type
1858 << " to " << result_type;
1859 }
1860 break;
1861
1862 case Primitive::kPrimChar:
1863 switch (input_type) {
1864 case Primitive::kPrimLong:
1865 // Type conversion from long to char is a result of code transformations.
1866 case Primitive::kPrimBoolean:
1867 // Boolean input is a result of code transformations.
1868 case Primitive::kPrimByte:
1869 case Primitive::kPrimShort:
1870 case Primitive::kPrimInt:
1871 // Processing a Dex `int-to-char' instruction.
1872 locations->SetInAt(0, Location::RequiresRegister());
1873 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1874 break;
1875
1876 default:
1877 LOG(FATAL) << "Unexpected type conversion from " << input_type
1878 << " to " << result_type;
1879 }
1880 break;
1881
1882 case Primitive::kPrimFloat:
1883 switch (input_type) {
1884 case Primitive::kPrimBoolean:
1885 // Boolean input is a result of code transformations.
1886 case Primitive::kPrimByte:
1887 case Primitive::kPrimShort:
1888 case Primitive::kPrimInt:
1889 case Primitive::kPrimChar:
1890 // Processing a Dex `int-to-float' instruction.
1891 locations->SetInAt(0, Location::RequiresRegister());
1892 locations->SetOut(Location::RequiresFpuRegister());
1893 break;
1894
1895 case Primitive::kPrimLong: {
1896 // Processing a Dex `long-to-float' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001897 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1898 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0),
1899 calling_convention.GetRegisterAt(1)));
1900 locations->SetOut(LocationFrom(calling_convention.GetFpuRegisterAt(0)));
Scott Wakelingfe885462016-09-22 10:24:38 +01001901 break;
1902 }
1903
1904 case Primitive::kPrimDouble:
1905 // Processing a Dex `double-to-float' instruction.
1906 locations->SetInAt(0, Location::RequiresFpuRegister());
1907 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1908 break;
1909
1910 default:
1911 LOG(FATAL) << "Unexpected type conversion from " << input_type
1912 << " to " << result_type;
1913 };
1914 break;
1915
1916 case Primitive::kPrimDouble:
1917 switch (input_type) {
1918 case Primitive::kPrimBoolean:
1919 // Boolean input is a result of code transformations.
1920 case Primitive::kPrimByte:
1921 case Primitive::kPrimShort:
1922 case Primitive::kPrimInt:
1923 case Primitive::kPrimChar:
1924 // Processing a Dex `int-to-double' instruction.
1925 locations->SetInAt(0, Location::RequiresRegister());
1926 locations->SetOut(Location::RequiresFpuRegister());
1927 break;
1928
1929 case Primitive::kPrimLong:
1930 // Processing a Dex `long-to-double' instruction.
1931 locations->SetInAt(0, Location::RequiresRegister());
1932 locations->SetOut(Location::RequiresFpuRegister());
1933 locations->AddTemp(Location::RequiresFpuRegister());
1934 locations->AddTemp(Location::RequiresFpuRegister());
1935 break;
1936
1937 case Primitive::kPrimFloat:
1938 // Processing a Dex `float-to-double' instruction.
1939 locations->SetInAt(0, Location::RequiresFpuRegister());
1940 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1941 break;
1942
1943 default:
1944 LOG(FATAL) << "Unexpected type conversion from " << input_type
1945 << " to " << result_type;
1946 };
1947 break;
1948
1949 default:
1950 LOG(FATAL) << "Unexpected type conversion from " << input_type
1951 << " to " << result_type;
1952 }
1953}
1954
1955void InstructionCodeGeneratorARMVIXL::VisitTypeConversion(HTypeConversion* conversion) {
1956 LocationSummary* locations = conversion->GetLocations();
1957 Location out = locations->Out();
1958 Location in = locations->InAt(0);
1959 Primitive::Type result_type = conversion->GetResultType();
1960 Primitive::Type input_type = conversion->GetInputType();
1961 DCHECK_NE(result_type, input_type);
1962 switch (result_type) {
1963 case Primitive::kPrimByte:
1964 switch (input_type) {
1965 case Primitive::kPrimLong:
1966 // Type conversion from long to byte is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001967 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 8);
Scott Wakelingfe885462016-09-22 10:24:38 +01001968 break;
1969 case Primitive::kPrimBoolean:
1970 // Boolean input is a result of code transformations.
1971 case Primitive::kPrimShort:
1972 case Primitive::kPrimInt:
1973 case Primitive::kPrimChar:
1974 // Processing a Dex `int-to-byte' instruction.
1975 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 8);
1976 break;
1977
1978 default:
1979 LOG(FATAL) << "Unexpected type conversion from " << input_type
1980 << " to " << result_type;
1981 }
1982 break;
1983
1984 case Primitive::kPrimShort:
1985 switch (input_type) {
1986 case Primitive::kPrimLong:
1987 // Type conversion from long to short is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01001988 __ Sbfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
Scott Wakelingfe885462016-09-22 10:24:38 +01001989 break;
1990 case Primitive::kPrimBoolean:
1991 // Boolean input is a result of code transformations.
1992 case Primitive::kPrimByte:
1993 case Primitive::kPrimInt:
1994 case Primitive::kPrimChar:
1995 // Processing a Dex `int-to-short' instruction.
1996 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
1997 break;
1998
1999 default:
2000 LOG(FATAL) << "Unexpected type conversion from " << input_type
2001 << " to " << result_type;
2002 }
2003 break;
2004
2005 case Primitive::kPrimInt:
2006 switch (input_type) {
2007 case Primitive::kPrimLong:
2008 // Processing a Dex `long-to-int' instruction.
2009 DCHECK(out.IsRegister());
2010 if (in.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002011 __ Mov(OutputRegister(conversion), LowRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01002012 } else if (in.IsDoubleStackSlot()) {
2013 GetAssembler()->LoadFromOffset(kLoadWord,
2014 OutputRegister(conversion),
2015 sp,
2016 in.GetStackIndex());
2017 } else {
2018 DCHECK(in.IsConstant());
2019 DCHECK(in.GetConstant()->IsLongConstant());
2020 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2021 __ Mov(OutputRegister(conversion), static_cast<int32_t>(value));
2022 }
2023 break;
2024
2025 case Primitive::kPrimFloat: {
2026 // Processing a Dex `float-to-int' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002027 vixl32::SRegister temp = LowSRegisterFrom(locations->GetTemp(0));
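        // VCVT to S32 rounds toward zero, saturates out-of-range values and converts
        // NaN to 0, which matches the Dex float-to-int semantics; the VMOV below then
        // moves the integer result from the FP register to the core output register.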
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002028 __ Vcvt(S32, F32, temp, InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002029 __ Vmov(OutputRegister(conversion), temp);
2030 break;
2031 }
2032
2033 case Primitive::kPrimDouble: {
2034 // Processing a Dex `double-to-int' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002035 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002036 __ Vcvt(S32, F64, temp_s, DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01002037 __ Vmov(OutputRegister(conversion), temp_s);
2038 break;
2039 }
2040
2041 default:
2042 LOG(FATAL) << "Unexpected type conversion from " << input_type
2043 << " to " << result_type;
2044 }
2045 break;
2046
2047 case Primitive::kPrimLong:
2048 switch (input_type) {
2049 case Primitive::kPrimBoolean:
2050 // Boolean input is a result of code transformations.
2051 case Primitive::kPrimByte:
2052 case Primitive::kPrimShort:
2053 case Primitive::kPrimInt:
2054 case Primitive::kPrimChar:
2055 // Processing a Dex `int-to-long' instruction.
2056 DCHECK(out.IsRegisterPair());
2057 DCHECK(in.IsRegister());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002058 __ Mov(LowRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002059 // Sign extension.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002060 __ Asr(HighRegisterFrom(out), LowRegisterFrom(out), 31);
Scott Wakelingfe885462016-09-22 10:24:38 +01002061 break;
2062
2063 case Primitive::kPrimFloat:
2064 // Processing a Dex `float-to-long' instruction.
2065 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
2066 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
2067 break;
2068
2069 case Primitive::kPrimDouble:
2070 // Processing a Dex `double-to-long' instruction.
2071 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
2072 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
2073 break;
2074
2075 default:
2076 LOG(FATAL) << "Unexpected type conversion from " << input_type
2077 << " to " << result_type;
2078 }
2079 break;
2080
2081 case Primitive::kPrimChar:
2082 switch (input_type) {
2083 case Primitive::kPrimLong:
2084 // Type conversion from long to char is a result of code transformations.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002085 __ Ubfx(OutputRegister(conversion), LowRegisterFrom(in), 0, 16);
Scott Wakelingfe885462016-09-22 10:24:38 +01002086 break;
2087 case Primitive::kPrimBoolean:
2088 // Boolean input is a result of code transformations.
2089 case Primitive::kPrimByte:
2090 case Primitive::kPrimShort:
2091 case Primitive::kPrimInt:
2092 // Processing a Dex `int-to-char' instruction.
2093 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, 16);
2094 break;
2095
2096 default:
2097 LOG(FATAL) << "Unexpected type conversion from " << input_type
2098 << " to " << result_type;
2099 }
2100 break;
2101
2102 case Primitive::kPrimFloat:
2103 switch (input_type) {
2104 case Primitive::kPrimBoolean:
2105 // Boolean input is a result of code transformations.
2106 case Primitive::kPrimByte:
2107 case Primitive::kPrimShort:
2108 case Primitive::kPrimInt:
2109 case Primitive::kPrimChar: {
2110 // Processing a Dex `int-to-float' instruction.
2111 __ Vmov(OutputSRegister(conversion), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002112 __ Vcvt(F32, S32, OutputSRegister(conversion), OutputSRegister(conversion));
Scott Wakelingfe885462016-09-22 10:24:38 +01002113 break;
2114 }
2115
2116 case Primitive::kPrimLong:
2117 // Processing a Dex `long-to-float' instruction.
2118 codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
2119 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
2120 break;
2121
2122 case Primitive::kPrimDouble:
2123 // Processing a Dex `double-to-float' instruction.
Scott Wakelingc34dba72016-10-03 10:14:44 +01002124 __ Vcvt(F32, F64, OutputSRegister(conversion), DRegisterFrom(in));
Scott Wakelingfe885462016-09-22 10:24:38 +01002125 break;
2126
2127 default:
2128 LOG(FATAL) << "Unexpected type conversion from " << input_type
2129 << " to " << result_type;
2130 };
2131 break;
2132
2133 case Primitive::kPrimDouble:
2134 switch (input_type) {
2135 case Primitive::kPrimBoolean:
2136 // Boolean input is a result of code transformations.
2137 case Primitive::kPrimByte:
2138 case Primitive::kPrimShort:
2139 case Primitive::kPrimInt:
2140 case Primitive::kPrimChar: {
2141 // Processing a Dex `int-to-double' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002142 __ Vmov(LowSRegisterFrom(out), InputRegisterAt(conversion, 0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002143 __ Vcvt(F64, S32, DRegisterFrom(out), LowSRegisterFrom(out));
Scott Wakelingfe885462016-09-22 10:24:38 +01002144 break;
2145 }
2146
2147 case Primitive::kPrimLong: {
2148 // Processing a Dex `long-to-double' instruction.
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002149 vixl32::Register low = LowRegisterFrom(in);
2150 vixl32::Register high = HighRegisterFrom(in);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002151 vixl32::SRegister out_s = LowSRegisterFrom(out);
Scott Wakelingc34dba72016-10-03 10:14:44 +01002152 vixl32::DRegister out_d = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002153 vixl32::SRegister temp_s = LowSRegisterFrom(locations->GetTemp(0));
Scott Wakelingc34dba72016-10-03 10:14:44 +01002154 vixl32::DRegister temp_d = DRegisterFrom(locations->GetTemp(0));
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002155 vixl32::DRegister constant_d = DRegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002156
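        // The sequence below computes
        //   out = (double)(int32_t)high * 2^32 + (double)(uint32_t)low,
        // i.e. the 64-bit value is rebuilt from its halves in double precision.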
2157 // temp_d = int-to-double(high)
2158 __ Vmov(temp_s, high);
Scott Wakelingfb0b7d42016-10-28 16:11:08 +01002159 __ Vcvt(F64, S32, temp_d, temp_s);
Scott Wakelingfe885462016-09-22 10:24:38 +01002160 // constant_d = k2Pow32EncodingForDouble
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002161 __ Vmov(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
Scott Wakelingfe885462016-09-22 10:24:38 +01002162 // out_d = unsigned-to-double(low)
2163 __ Vmov(out_s, low);
2164 __ Vcvt(F64, U32, out_d, out_s);
2165 // out_d += temp_d * constant_d
2166 __ Vmla(F64, out_d, temp_d, constant_d);
2167 break;
2168 }
2169
2170 case Primitive::kPrimFloat:
2171 // Processing a Dex `float-to-double' instruction.
Scott Wakelingc34dba72016-10-03 10:14:44 +01002172 __ Vcvt(F64, F32, DRegisterFrom(out), InputSRegisterAt(conversion, 0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002173 break;
2174
2175 default:
2176 LOG(FATAL) << "Unexpected type conversion from " << input_type
2177 << " to " << result_type;
2178 };
2179 break;
2180
2181 default:
2182 LOG(FATAL) << "Unexpected type conversion from " << input_type
2183 << " to " << result_type;
2184 }
2185}
2186
2187void LocationsBuilderARMVIXL::VisitAdd(HAdd* add) {
2188 LocationSummary* locations =
2189 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
2190 switch (add->GetResultType()) {
2191 case Primitive::kPrimInt: {
2192 locations->SetInAt(0, Location::RequiresRegister());
2193 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2194 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2195 break;
2196 }
2197
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002198 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002199 case Primitive::kPrimLong: {
2200 locations->SetInAt(0, Location::RequiresRegister());
2201 locations->SetInAt(1, Location::RequiresRegister());
2202 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2203 break;
2204 }
2205
2206 case Primitive::kPrimFloat:
2207 case Primitive::kPrimDouble: {
2208 locations->SetInAt(0, Location::RequiresFpuRegister());
2209 locations->SetInAt(1, Location::RequiresFpuRegister());
2210 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2211 break;
2212 }
2213
2214 default:
2215 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2216 }
2217}
2218
2219void InstructionCodeGeneratorARMVIXL::VisitAdd(HAdd* add) {
2220 LocationSummary* locations = add->GetLocations();
2221 Location out = locations->Out();
2222 Location first = locations->InAt(0);
2223 Location second = locations->InAt(1);
2224
2225 switch (add->GetResultType()) {
2226 case Primitive::kPrimInt: {
2227 __ Add(OutputRegister(add), InputRegisterAt(add, 0), InputOperandAt(add, 1));
2228 }
2229 break;
2230
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002231 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002232 case Primitive::kPrimLong: {
2233 DCHECK(second.IsRegisterPair());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002234 __ Adds(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
2235 __ Adc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
Scott Wakelingfe885462016-09-22 10:24:38 +01002236 break;
2237 }
2238
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002239 case Primitive::kPrimFloat:
Scott Wakelingfe885462016-09-22 10:24:38 +01002240 case Primitive::kPrimDouble:
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002241 __ Vadd(OutputVRegister(add), InputVRegisterAt(add, 0), InputVRegisterAt(add, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002242 break;
2243
2244 default:
2245 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2246 }
2247}
2248
2249void LocationsBuilderARMVIXL::VisitSub(HSub* sub) {
2250 LocationSummary* locations =
2251 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
2252 switch (sub->GetResultType()) {
2253 case Primitive::kPrimInt: {
2254 locations->SetInAt(0, Location::RequiresRegister());
2255 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
2256 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2257 break;
2258 }
2259
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002260 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002261 case Primitive::kPrimLong: {
2262 locations->SetInAt(0, Location::RequiresRegister());
2263 locations->SetInAt(1, Location::RequiresRegister());
2264 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2265 break;
2266 }
2267 case Primitive::kPrimFloat:
2268 case Primitive::kPrimDouble: {
2269 locations->SetInAt(0, Location::RequiresFpuRegister());
2270 locations->SetInAt(1, Location::RequiresFpuRegister());
2271 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2272 break;
2273 }
2274 default:
2275 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2276 }
2277}
2278
2279void InstructionCodeGeneratorARMVIXL::VisitSub(HSub* sub) {
2280 LocationSummary* locations = sub->GetLocations();
2281 Location out = locations->Out();
2282 Location first = locations->InAt(0);
2283 Location second = locations->InAt(1);
2284 switch (sub->GetResultType()) {
2285 case Primitive::kPrimInt: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002286 __ Sub(OutputRegister(sub), InputRegisterAt(sub, 0), InputOperandAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002287 break;
2288 }
2289
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002290 // TODO(VIXL): https://android-review.googlesource.com/#/c/254144/
Scott Wakelingfe885462016-09-22 10:24:38 +01002291 case Primitive::kPrimLong: {
2292 DCHECK(second.IsRegisterPair());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002293 __ Subs(LowRegisterFrom(out), LowRegisterFrom(first), LowRegisterFrom(second));
2294 __ Sbc(HighRegisterFrom(out), HighRegisterFrom(first), HighRegisterFrom(second));
Scott Wakelingfe885462016-09-22 10:24:38 +01002295 break;
2296 }
2297
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002298 case Primitive::kPrimFloat:
2299 case Primitive::kPrimDouble:
2300 __ Vsub(OutputVRegister(sub), InputVRegisterAt(sub, 0), InputVRegisterAt(sub, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002301 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002302
2303 default:
2304 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2305 }
2306}
2307
2308void LocationsBuilderARMVIXL::VisitMul(HMul* mul) {
2309 LocationSummary* locations =
2310 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2311 switch (mul->GetResultType()) {
2312 case Primitive::kPrimInt:
2313 case Primitive::kPrimLong: {
2314 locations->SetInAt(0, Location::RequiresRegister());
2315 locations->SetInAt(1, Location::RequiresRegister());
2316 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2317 break;
2318 }
2319
2320 case Primitive::kPrimFloat:
2321 case Primitive::kPrimDouble: {
2322 locations->SetInAt(0, Location::RequiresFpuRegister());
2323 locations->SetInAt(1, Location::RequiresFpuRegister());
2324 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2325 break;
2326 }
2327
2328 default:
2329 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2330 }
2331}
2332
2333void InstructionCodeGeneratorARMVIXL::VisitMul(HMul* mul) {
2334 LocationSummary* locations = mul->GetLocations();
2335 Location out = locations->Out();
2336 Location first = locations->InAt(0);
2337 Location second = locations->InAt(1);
2338 switch (mul->GetResultType()) {
2339 case Primitive::kPrimInt: {
2340 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2341 break;
2342 }
2343 case Primitive::kPrimLong: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002344 vixl32::Register out_hi = HighRegisterFrom(out);
2345 vixl32::Register out_lo = LowRegisterFrom(out);
2346 vixl32::Register in1_hi = HighRegisterFrom(first);
2347 vixl32::Register in1_lo = LowRegisterFrom(first);
2348 vixl32::Register in2_hi = HighRegisterFrom(second);
2349 vixl32::Register in2_lo = LowRegisterFrom(second);
Scott Wakelingfe885462016-09-22 10:24:38 +01002350
2351 // Extra checks to protect against overlaps caused by the existence of the R1_R2 pair.
2352 // The algorithm is wrong if out.hi is either in1.lo or in2.lo
2353 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
2354 DCHECK_NE(out_hi.GetCode(), in1_lo.GetCode());
2355 DCHECK_NE(out_hi.GetCode(), in2_lo.GetCode());
2356
2357 // input: in1 - 64 bits, in2 - 64 bits
2358 // output: out
2359 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2360 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2361 // parts: out.lo = (in1.lo * in2.lo)[31:0]
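      // This is the schoolbook decomposition modulo 2^64: the in1.hi * in2.hi term only
      // contributes to bits 64 and above, so it is dropped.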
2362
2363 UseScratchRegisterScope temps(GetVIXLAssembler());
2364 vixl32::Register temp = temps.Acquire();
2365 // temp <- in1.lo * in2.hi
2366 __ Mul(temp, in1_lo, in2_hi);
2367 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2368 __ Mla(out_hi, in1_hi, in2_lo, temp);
2369 // out.lo <- (in1.lo * in2.lo)[31:0];
2370 __ Umull(out_lo, temp, in1_lo, in2_lo);
2371 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002372 __ Add(out_hi, out_hi, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01002373 break;
2374 }
2375
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002376 case Primitive::kPrimFloat:
2377 case Primitive::kPrimDouble:
2378 __ Vmul(OutputVRegister(mul), InputVRegisterAt(mul, 0), InputVRegisterAt(mul, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002379 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002380
2381 default:
2382 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2383 }
2384}
2385
Scott Wakelingfe885462016-09-22 10:24:38 +01002386void InstructionCodeGeneratorARMVIXL::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2387 DCHECK(instruction->IsDiv() || instruction->IsRem());
2388 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2389
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002390 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002391 DCHECK(second.IsConstant());
2392
2393 vixl32::Register out = OutputRegister(instruction);
2394 vixl32::Register dividend = InputRegisterAt(instruction, 0);
2395 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2396 DCHECK(imm == 1 || imm == -1);
2397
2398 if (instruction->IsRem()) {
2399 __ Mov(out, 0);
2400 } else {
2401 if (imm == 1) {
2402 __ Mov(out, dividend);
2403 } else {
2404 __ Rsb(out, dividend, 0);
2405 }
2406 }
2407}
2408
2409void InstructionCodeGeneratorARMVIXL::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2410 DCHECK(instruction->IsDiv() || instruction->IsRem());
2411 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2412
2413 LocationSummary* locations = instruction->GetLocations();
2414 Location second = locations->InAt(1);
2415 DCHECK(second.IsConstant());
2416
2417 vixl32::Register out = OutputRegister(instruction);
2418 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002419 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002420 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2421 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
2422 int ctz_imm = CTZ(abs_imm);
2423
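  // For a power-of-two divisor 2^k (k == ctz_imm), bias a negative dividend by 2^k - 1
  // so that the arithmetic shift below rounds the quotient toward zero. For k == 1 the
  // bias is simply the sign bit, hence the single LSR in that case. The remainder is
  // recovered as the low k bits of the biased value minus the bias.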
2424 if (ctz_imm == 1) {
2425 __ Lsr(temp, dividend, 32 - ctz_imm);
2426 } else {
2427 __ Asr(temp, dividend, 31);
2428 __ Lsr(temp, temp, 32 - ctz_imm);
2429 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002430 __ Add(out, temp, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002431
2432 if (instruction->IsDiv()) {
2433 __ Asr(out, out, ctz_imm);
2434 if (imm < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002435 __ Rsb(out, out, 0);
Scott Wakelingfe885462016-09-22 10:24:38 +01002436 }
2437 } else {
2438 __ Ubfx(out, out, 0, ctz_imm);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002439 __ Sub(out, out, temp);
Scott Wakelingfe885462016-09-22 10:24:38 +01002440 }
2441}
2442
2443void InstructionCodeGeneratorARMVIXL::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2444 DCHECK(instruction->IsDiv() || instruction->IsRem());
2445 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2446
2447 LocationSummary* locations = instruction->GetLocations();
2448 Location second = locations->InAt(1);
2449 DCHECK(second.IsConstant());
2450
2451 vixl32::Register out = OutputRegister(instruction);
2452 vixl32::Register dividend = InputRegisterAt(instruction, 0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002453 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(0));
2454 vixl32::Register temp2 = RegisterFrom(locations->GetTemp(1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002455 int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2456
2457 int64_t magic;
2458 int shift;
2459 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
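  // Classic division by multiplication with a precomputed magic constant (see
  // Hacker's Delight, chapter 10): the quotient is roughly (dividend * magic) >> (32 + shift),
  // corrected when the signs of `magic` and `imm` differ, with the sign bit added at the
  // end so the result rounds toward zero. Illustrative example (values not taken from this
  // code, just the commonly tabulated ones): for imm == 7, magic == 0x92492493 and shift == 2.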
2460
2461 __ Mov(temp1, magic);
2462 __ Smull(temp2, temp1, dividend, temp1);
2463
2464 if (imm > 0 && magic < 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002465 __ Add(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002466 } else if (imm < 0 && magic > 0) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002467 __ Sub(temp1, temp1, dividend);
Scott Wakelingfe885462016-09-22 10:24:38 +01002468 }
2469
2470 if (shift != 0) {
2471 __ Asr(temp1, temp1, shift);
2472 }
2473
2474 if (instruction->IsDiv()) {
2475 __ Sub(out, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
2476 } else {
2477 __ Sub(temp1, temp1, Operand(temp1, vixl32::Shift(ASR), 31));
2478 // TODO: Strength reduction for mls.
2479 __ Mov(temp2, imm);
2480 __ Mls(out, temp1, temp2, dividend);
2481 }
2482}
2483
2484void InstructionCodeGeneratorARMVIXL::GenerateDivRemConstantIntegral(
2485 HBinaryOperation* instruction) {
2486 DCHECK(instruction->IsDiv() || instruction->IsRem());
2487 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2488
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002489 Location second = instruction->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002490 DCHECK(second.IsConstant());
2491
2492 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2493 if (imm == 0) {
2494 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2495 } else if (imm == 1 || imm == -1) {
2496 DivRemOneOrMinusOne(instruction);
2497 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
2498 DivRemByPowerOfTwo(instruction);
2499 } else {
2500 DCHECK(imm <= -2 || imm >= 2);
2501 GenerateDivRemWithAnyConstant(instruction);
2502 }
2503}
2504
2505void LocationsBuilderARMVIXL::VisitDiv(HDiv* div) {
2506 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2507 if (div->GetResultType() == Primitive::kPrimLong) {
2508 // pLdiv runtime call.
2509 call_kind = LocationSummary::kCallOnMainOnly;
2510 } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
2511 // sdiv will be replaced by another instruction sequence.
2512 } else if (div->GetResultType() == Primitive::kPrimInt &&
2513 !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2514 // pIdivmod runtime call.
2515 call_kind = LocationSummary::kCallOnMainOnly;
2516 }
2517
2518 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2519
2520 switch (div->GetResultType()) {
2521 case Primitive::kPrimInt: {
2522 if (div->InputAt(1)->IsConstant()) {
2523 locations->SetInAt(0, Location::RequiresRegister());
2524 locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
2525 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2526 int32_t value = div->InputAt(1)->AsIntConstant()->GetValue();
2527 if (value == 1 || value == 0 || value == -1) {
2528 // No temp register required.
2529 } else {
2530 locations->AddTemp(Location::RequiresRegister());
2531 if (!IsPowerOfTwo(AbsOrMin(value))) {
2532 locations->AddTemp(Location::RequiresRegister());
2533 }
2534 }
2535 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2536 locations->SetInAt(0, Location::RequiresRegister());
2537 locations->SetInAt(1, Location::RequiresRegister());
2538 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2539 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01002540 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2541 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2542 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
2543 // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2544 // we only need the former.
2545 locations->SetOut(LocationFrom(r0));
Scott Wakelingfe885462016-09-22 10:24:38 +01002546 }
2547 break;
2548 }
2549 case Primitive::kPrimLong: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01002550 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2551 locations->SetInAt(0, LocationFrom(
2552 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2553 locations->SetInAt(1, LocationFrom(
2554 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2555 locations->SetOut(LocationFrom(r0, r1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002556 break;
2557 }
2558 case Primitive::kPrimFloat:
2559 case Primitive::kPrimDouble: {
2560 locations->SetInAt(0, Location::RequiresFpuRegister());
2561 locations->SetInAt(1, Location::RequiresFpuRegister());
2562 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2563 break;
2564 }
2565
2566 default:
2567 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2568 }
2569}
2570
2571void InstructionCodeGeneratorARMVIXL::VisitDiv(HDiv* div) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01002572 Location lhs = div->GetLocations()->InAt(0);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002573 Location rhs = div->GetLocations()->InAt(1);
Scott Wakelingfe885462016-09-22 10:24:38 +01002574
2575 switch (div->GetResultType()) {
2576 case Primitive::kPrimInt: {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002577 if (rhs.IsConstant()) {
Scott Wakelingfe885462016-09-22 10:24:38 +01002578 GenerateDivRemConstantIntegral(div);
2579 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2580 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
2581 } else {
Artem Serov551b28f2016-10-18 19:11:30 +01002582 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2583 DCHECK(calling_convention.GetRegisterAt(0).Is(RegisterFrom(lhs)));
2584 DCHECK(calling_convention.GetRegisterAt(1).Is(RegisterFrom(rhs)));
2585 DCHECK(r0.Is(OutputRegister(div)));
2586
2587 codegen_->InvokeRuntime(kQuickIdivmod, div, div->GetDexPc());
2588 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01002589 }
2590 break;
2591 }
2592
2593 case Primitive::kPrimLong: {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01002594 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2595 DCHECK(calling_convention.GetRegisterAt(0).Is(LowRegisterFrom(lhs)));
2596 DCHECK(calling_convention.GetRegisterAt(1).Is(HighRegisterFrom(lhs)));
2597 DCHECK(calling_convention.GetRegisterAt(2).Is(LowRegisterFrom(rhs)));
2598 DCHECK(calling_convention.GetRegisterAt(3).Is(HighRegisterFrom(rhs)));
2599 DCHECK(LowRegisterFrom(div->GetLocations()->Out()).Is(r0));
2600 DCHECK(HighRegisterFrom(div->GetLocations()->Out()).Is(r1));
2601
2602 codegen_->InvokeRuntime(kQuickLdiv, div, div->GetDexPc());
2603 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Scott Wakelingfe885462016-09-22 10:24:38 +01002604 break;
2605 }
2606
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002607 case Primitive::kPrimFloat:
2608 case Primitive::kPrimDouble:
2609 __ Vdiv(OutputVRegister(div), InputVRegisterAt(div, 0), InputVRegisterAt(div, 1));
Scott Wakelingfe885462016-09-22 10:24:38 +01002610 break;
Scott Wakelingfe885462016-09-22 10:24:38 +01002611
2612 default:
2613 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2614 }
2615}
2616
Artem Serov551b28f2016-10-18 19:11:30 +01002617void LocationsBuilderARMVIXL::VisitRem(HRem* rem) {
2618 Primitive::Type type = rem->GetResultType();
2619
2620 // Most remainders are implemented in the runtime.
2621 LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
2622 if (rem->GetResultType() == Primitive::kPrimInt && rem->InputAt(1)->IsConstant()) {
2623 // sdiv will be replaced by another instruction sequence.
2624 call_kind = LocationSummary::kNoCall;
2625 } else if ((rem->GetResultType() == Primitive::kPrimInt)
2626 && codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2627 // Have hardware divide instruction for int, do it with three instructions.
2628 call_kind = LocationSummary::kNoCall;
2629 }
2630
2631 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2632
2633 switch (type) {
2634 case Primitive::kPrimInt: {
2635 if (rem->InputAt(1)->IsConstant()) {
2636 locations->SetInAt(0, Location::RequiresRegister());
2637 locations->SetInAt(1, Location::ConstantLocation(rem->InputAt(1)->AsConstant()));
2638 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2639 int32_t value = rem->InputAt(1)->AsIntConstant()->GetValue();
2640 if (value == 1 || value == 0 || value == -1) {
2641 // No temp register required.
2642 } else {
2643 locations->AddTemp(Location::RequiresRegister());
2644 if (!IsPowerOfTwo(AbsOrMin(value))) {
2645 locations->AddTemp(Location::RequiresRegister());
2646 }
2647 }
2648 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2649 locations->SetInAt(0, Location::RequiresRegister());
2650 locations->SetInAt(1, Location::RequiresRegister());
2651 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2652 locations->AddTemp(Location::RequiresRegister());
2653 } else {
2654 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2655 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2656 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
2657 // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2658 // we only need the latter.
2659 locations->SetOut(LocationFrom(r1));
2660 }
2661 break;
2662 }
2663 case Primitive::kPrimLong: {
2664 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2665 locations->SetInAt(0, LocationFrom(
2666 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2667 locations->SetInAt(1, LocationFrom(
2668 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2669 // The runtime helper puts the output in R2,R3.
2670 locations->SetOut(LocationFrom(r2, r3));
2671 break;
2672 }
2673 case Primitive::kPrimFloat: {
2674 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2675 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2676 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2677 locations->SetOut(LocationFrom(s0));
2678 break;
2679 }
2680
2681 case Primitive::kPrimDouble: {
2682 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2683 locations->SetInAt(0, LocationFrom(
2684 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2685 locations->SetInAt(1, LocationFrom(
2686 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2687 locations->SetOut(LocationFrom(s0, s1));
2688 break;
2689 }
2690
2691 default:
2692 LOG(FATAL) << "Unexpected rem type " << type;
2693 }
2694}
2695
2696void InstructionCodeGeneratorARMVIXL::VisitRem(HRem* rem) {
2697 LocationSummary* locations = rem->GetLocations();
2698 Location second = locations->InAt(1);
2699
2700 Primitive::Type type = rem->GetResultType();
2701 switch (type) {
2702 case Primitive::kPrimInt: {
2703 vixl32::Register reg1 = InputRegisterAt(rem, 0);
2704 vixl32::Register out_reg = OutputRegister(rem);
2705 if (second.IsConstant()) {
2706 GenerateDivRemConstantIntegral(rem);
2707 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2708 vixl32::Register reg2 = RegisterFrom(second);
2709 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
2710
2711 // temp = reg1 / reg2 (integer division)
2712 // dest = reg1 - temp * reg2
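// (e.g. reg1 = 7, reg2 = 3 gives temp = 2 and dest = 7 - 2 * 3 = 1. Sdiv truncates toward
// zero, so reg1 = -7, reg2 = 3 gives temp = -2 and dest = -7 - (-2 * 3) = -1, matching the
// sign behaviour of Java's rem.)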
2713 __ Sdiv(temp, reg1, reg2);
2714 __ Mls(out_reg, temp, reg2, reg1);
2715 } else {
2716 InvokeRuntimeCallingConventionARMVIXL calling_convention;
2717 DCHECK(reg1.Is(calling_convention.GetRegisterAt(0)));
2718 DCHECK(RegisterFrom(second).Is(calling_convention.GetRegisterAt(1)));
2719 DCHECK(out_reg.Is(r1));
2720
2721 codegen_->InvokeRuntime(kQuickIdivmod, rem, rem->GetDexPc());
2722 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
2723 }
2724 break;
2725 }
2726
2727 case Primitive::kPrimLong: {
2728 codegen_->InvokeRuntime(kQuickLmod, rem, rem->GetDexPc());
2729 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
2730 break;
2731 }
2732
2733 case Primitive::kPrimFloat: {
2734 codegen_->InvokeRuntime(kQuickFmodf, rem, rem->GetDexPc());
2735 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
2736 break;
2737 }
2738
2739 case Primitive::kPrimDouble: {
2740 codegen_->InvokeRuntime(kQuickFmod, rem, rem->GetDexPc());
2741 CheckEntrypointTypes<kQuickFmod, double, double, double>();
2742 break;
2743 }
2744
2745 default:
2746 LOG(FATAL) << "Unexpected rem type " << type;
2747 }
2748}
2749
2750
Scott Wakelingfe885462016-09-22 10:24:38 +01002751void LocationsBuilderARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002752 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
Scott Wakelingfe885462016-09-22 10:24:38 +01002753 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2754 ? LocationSummary::kCallOnSlowPath
2755 : LocationSummary::kNoCall;
2756 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2757 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2758 if (instruction->HasUses()) {
2759 locations->SetOut(Location::SameAsFirstInput());
2760 }
2761}
2762
2763void InstructionCodeGeneratorARMVIXL::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2764 DivZeroCheckSlowPathARMVIXL* slow_path =
2765 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARMVIXL(instruction);
2766 codegen_->AddSlowPath(slow_path);
2767
2768 LocationSummary* locations = instruction->GetLocations();
2769 Location value = locations->InAt(0);
2770
2771 switch (instruction->GetType()) {
2772 case Primitive::kPrimBoolean:
2773 case Primitive::kPrimByte:
2774 case Primitive::kPrimChar:
2775 case Primitive::kPrimShort:
2776 case Primitive::kPrimInt: {
2777 if (value.IsRegister()) {
2778 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2779 } else {
2780 DCHECK(value.IsConstant()) << value;
2781 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2782 __ B(slow_path->GetEntryLabel());
2783 }
2784 }
2785 break;
2786 }
2787 case Primitive::kPrimLong: {
2788 if (value.IsRegisterPair()) {
2789 UseScratchRegisterScope temps(GetVIXLAssembler());
2790 vixl32::Register temp = temps.Acquire();
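// Orr the two halves together and set the flags: Z is set only when the full 64-bit value
// is zero.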
Scott Wakelinga7812ae2016-10-17 10:03:36 +01002791 __ Orrs(temp, LowRegisterFrom(value), HighRegisterFrom(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01002792 __ B(eq, slow_path->GetEntryLabel());
2793 } else {
2794 DCHECK(value.IsConstant()) << value;
2795 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2796 __ B(slow_path->GetEntryLabel());
2797 }
2798 }
2799 break;
2800 }
2801 default:
2802 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2803 }
2804}
2805
Artem Serov02109dd2016-09-23 17:17:54 +01002806void InstructionCodeGeneratorARMVIXL::HandleIntegerRotate(HRor* ror) {
2807 LocationSummary* locations = ror->GetLocations();
2808 vixl32::Register in = InputRegisterAt(ror, 0);
2809 Location rhs = locations->InAt(1);
2810 vixl32::Register out = OutputRegister(ror);
2811
2812 if (rhs.IsConstant()) {
 2813 // The Arm32 and Thumb2 assemblers require the rotation to be in the interval [1,31],
 2814 // so map all rotations to a positive equivalent in that range.
 2815 // (e.g. a rotation left *or* right by -2 bits == 30 bits in the same direction.)
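// (e.g. a constant rotation amount of -2 is masked to 30 below, and a ROR by 30 to the
// right is the same as a rotation left by 2.)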
2816 uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
2817 if (rot) {
2818 // Rotate, mapping left rotations to right equivalents if necessary.
2819 // (e.g. left by 2 bits == right by 30.)
2820 __ Ror(out, in, rot);
2821 } else if (!out.Is(in)) {
2822 __ Mov(out, in);
2823 }
2824 } else {
2825 __ Ror(out, in, RegisterFrom(rhs));
2826 }
2827}
2828
2829// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
2830// rotates by swapping input regs (effectively rotating by the first 32-bits of
2831// a larger rotation) or flipping direction (thus treating larger right/left
2832// rotations as sub-word sized rotations in the other direction) as appropriate.
2833void InstructionCodeGeneratorARMVIXL::HandleLongRotate(HRor* ror) {
2834 LocationSummary* locations = ror->GetLocations();
2835 vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
2836 vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
2837 Location rhs = locations->InAt(1);
2838 vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
2839 vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());
2840
2841 if (rhs.IsConstant()) {
2842 uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
2843 // Map all rotations to +ve. equivalents on the interval [0,63].
2844 rot &= kMaxLongShiftDistance;
 2845 // For rotation distances of a word (32 bits) or more, 'pre-rotate' by 32 bits so that the
 2846 // rotate logic below reduces to a simple pair of binary ORRs.
2847 // (e.g. 34 bits == in_reg swap + 2 bits right.)
2848 if (rot >= kArmBitsPerWord) {
2849 rot -= kArmBitsPerWord;
2850 std::swap(in_reg_hi, in_reg_lo);
2851 }
2852 // Rotate, or mov to out for zero or word size rotations.
2853 if (rot != 0u) {
2854 __ Lsr(out_reg_hi, in_reg_hi, rot);
2855 __ Orr(out_reg_hi, out_reg_hi, Operand(in_reg_lo, ShiftType::LSL, kArmBitsPerWord - rot));
2856 __ Lsr(out_reg_lo, in_reg_lo, rot);
2857 __ Orr(out_reg_lo, out_reg_lo, Operand(in_reg_hi, ShiftType::LSL, kArmBitsPerWord - rot));
2858 } else {
2859 __ Mov(out_reg_lo, in_reg_lo);
2860 __ Mov(out_reg_hi, in_reg_hi);
2861 }
2862 } else {
2863 vixl32::Register shift_right = RegisterFrom(locations->GetTemp(0));
2864 vixl32::Register shift_left = RegisterFrom(locations->GetTemp(1));
2865 vixl32::Label end;
2866 vixl32::Label shift_by_32_plus_shift_right;
2867
2868 __ And(shift_right, RegisterFrom(rhs), 0x1F);
2869 __ Lsrs(shift_left, RegisterFrom(rhs), 6);
 2870 // TODO(VIXL): Check that the flags are kept once "vixl32::LeaveFlags" is enabled.
2871 __ Rsb(shift_left, shift_right, kArmBitsPerWord);
2872 __ B(cc, &shift_by_32_plus_shift_right);
2873
2874 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
2875 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
2876 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
2877 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
2878 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
2879 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
2880 __ Lsr(shift_left, in_reg_hi, shift_right);
2881 __ Add(out_reg_lo, out_reg_lo, shift_left);
2882 __ B(&end);
2883
2884 __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right.
2885 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
2886 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
2887 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
2888 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
2889 __ Add(out_reg_hi, out_reg_hi, out_reg_lo);
2890 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
2891 __ Lsl(shift_right, in_reg_hi, shift_left);
2892 __ Add(out_reg_lo, out_reg_lo, shift_right);
2893
2894 __ Bind(&end);
2895 }
2896}
2897
2898void LocationsBuilderARMVIXL::VisitRor(HRor* ror) {
2899 LocationSummary* locations =
2900 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
2901 switch (ror->GetResultType()) {
2902 case Primitive::kPrimInt: {
2903 locations->SetInAt(0, Location::RequiresRegister());
2904 locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1)));
2905 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2906 break;
2907 }
2908 case Primitive::kPrimLong: {
2909 locations->SetInAt(0, Location::RequiresRegister());
2910 if (ror->InputAt(1)->IsConstant()) {
2911 locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant()));
2912 } else {
2913 locations->SetInAt(1, Location::RequiresRegister());
2914 locations->AddTemp(Location::RequiresRegister());
2915 locations->AddTemp(Location::RequiresRegister());
2916 }
2917 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2918 break;
2919 }
2920 default:
2921 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
2922 }
2923}
2924
2925void InstructionCodeGeneratorARMVIXL::VisitRor(HRor* ror) {
2926 Primitive::Type type = ror->GetResultType();
2927 switch (type) {
2928 case Primitive::kPrimInt: {
2929 HandleIntegerRotate(ror);
2930 break;
2931 }
2932 case Primitive::kPrimLong: {
2933 HandleLongRotate(ror);
2934 break;
2935 }
2936 default:
2937 LOG(FATAL) << "Unexpected operation type " << type;
2938 UNREACHABLE();
2939 }
2940}
2941
Artem Serov02d37832016-10-25 15:25:33 +01002942void LocationsBuilderARMVIXL::HandleShift(HBinaryOperation* op) {
2943 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2944
2945 LocationSummary* locations =
2946 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2947
2948 switch (op->GetResultType()) {
2949 case Primitive::kPrimInt: {
2950 locations->SetInAt(0, Location::RequiresRegister());
2951 if (op->InputAt(1)->IsConstant()) {
2952 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
2953 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2954 } else {
2955 locations->SetInAt(1, Location::RequiresRegister());
2956 // Make the output overlap, as it will be used to hold the masked
2957 // second input.
2958 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2959 }
2960 break;
2961 }
2962 case Primitive::kPrimLong: {
2963 locations->SetInAt(0, Location::RequiresRegister());
2964 if (op->InputAt(1)->IsConstant()) {
2965 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
 2966 // For simplicity, use kOutputOverlap even though we only require that low registers
 2967 // don't clash with high registers, which the register allocator currently guarantees.
2968 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2969 } else {
2970 locations->SetInAt(1, Location::RequiresRegister());
2971 locations->AddTemp(Location::RequiresRegister());
2972 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2973 }
2974 break;
2975 }
2976 default:
2977 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2978 }
2979}
2980
2981void InstructionCodeGeneratorARMVIXL::HandleShift(HBinaryOperation* op) {
2982 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2983
2984 LocationSummary* locations = op->GetLocations();
2985 Location out = locations->Out();
2986 Location first = locations->InAt(0);
2987 Location second = locations->InAt(1);
2988
2989 Primitive::Type type = op->GetResultType();
2990 switch (type) {
2991 case Primitive::kPrimInt: {
2992 vixl32::Register out_reg = OutputRegister(op);
2993 vixl32::Register first_reg = InputRegisterAt(op, 0);
2994 if (second.IsRegister()) {
2995 vixl32::Register second_reg = RegisterFrom(second);
 2996 // ARM doesn't mask the shift count, so we need to do it ourselves.
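// (e.g. a shift count of 33 behaves as a shift by 33 & 31 == 1, matching Java semantics.)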
2997 __ And(out_reg, second_reg, kMaxIntShiftDistance);
2998 if (op->IsShl()) {
2999 __ Lsl(out_reg, first_reg, out_reg);
3000 } else if (op->IsShr()) {
3001 __ Asr(out_reg, first_reg, out_reg);
3002 } else {
3003 __ Lsr(out_reg, first_reg, out_reg);
3004 }
3005 } else {
3006 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
3007 uint32_t shift_value = cst & kMaxIntShiftDistance;
3008 if (shift_value == 0) { // ARM does not support shifting with 0 immediate.
3009 __ Mov(out_reg, first_reg);
3010 } else if (op->IsShl()) {
3011 __ Lsl(out_reg, first_reg, shift_value);
3012 } else if (op->IsShr()) {
3013 __ Asr(out_reg, first_reg, shift_value);
3014 } else {
3015 __ Lsr(out_reg, first_reg, shift_value);
3016 }
3017 }
3018 break;
3019 }
3020 case Primitive::kPrimLong: {
3021 vixl32::Register o_h = HighRegisterFrom(out);
3022 vixl32::Register o_l = LowRegisterFrom(out);
3023
3024 vixl32::Register high = HighRegisterFrom(first);
3025 vixl32::Register low = LowRegisterFrom(first);
3026
3027 if (second.IsRegister()) {
3028 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3029
3030 vixl32::Register second_reg = RegisterFrom(second);
3031
3032 if (op->IsShl()) {
3033 __ And(o_l, second_reg, kMaxLongShiftDistance);
3034 // Shift the high part
3035 __ Lsl(o_h, high, o_l);
 3036 // Shift the low part and `or` the bits that spill over into the high part
3037 __ Rsb(temp, o_l, kArmBitsPerWord);
3038 __ Lsr(temp, low, temp);
3039 __ Orr(o_h, o_h, temp);
3040 // If the shift is > 32 bits, override the high part
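// (e.g. with a runtime shift of 40, the Lsl/Lsr above contribute nothing since
// register-controlled shifts by 32 or more produce 0; Subs leaves 8, so the predicated lsl
// below sets o_h = low << 8, and the final Lsl clears o_l.)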
3041 __ Subs(temp, o_l, kArmBitsPerWord);
3042 {
3043 AssemblerAccurateScope guard(GetVIXLAssembler(),
3044 3 * kArmInstrMaxSizeInBytes,
3045 CodeBufferCheckScope::kMaximumSize);
3046 __ it(pl);
3047 __ lsl(pl, o_h, low, temp);
3048 }
3049 // Shift the low part
3050 __ Lsl(o_l, low, o_l);
3051 } else if (op->IsShr()) {
3052 __ And(o_h, second_reg, kMaxLongShiftDistance);
3053 // Shift the low part
3054 __ Lsr(o_l, low, o_h);
 3055 // Shift the high part and `or` the bits that spill over into the low part
3056 __ Rsb(temp, o_h, kArmBitsPerWord);
3057 __ Lsl(temp, high, temp);
3058 __ Orr(o_l, o_l, temp);
3059 // If the shift is > 32 bits, override the low part
3060 __ Subs(temp, o_h, kArmBitsPerWord);
3061 {
3062 AssemblerAccurateScope guard(GetVIXLAssembler(),
3063 3 * kArmInstrMaxSizeInBytes,
3064 CodeBufferCheckScope::kMaximumSize);
3065 __ it(pl);
3066 __ asr(pl, o_l, high, temp);
3067 }
3068 // Shift the high part
3069 __ Asr(o_h, high, o_h);
3070 } else {
3071 __ And(o_h, second_reg, kMaxLongShiftDistance);
3072 // same as Shr except we use `Lsr`s and not `Asr`s
3073 __ Lsr(o_l, low, o_h);
3074 __ Rsb(temp, o_h, kArmBitsPerWord);
3075 __ Lsl(temp, high, temp);
3076 __ Orr(o_l, o_l, temp);
3077 __ Subs(temp, o_h, kArmBitsPerWord);
3078 {
3079 AssemblerAccurateScope guard(GetVIXLAssembler(),
3080 3 * kArmInstrMaxSizeInBytes,
3081 CodeBufferCheckScope::kMaximumSize);
3082 __ it(pl);
3083 __ lsr(pl, o_l, high, temp);
3084 }
3085 __ Lsr(o_h, high, o_h);
3086 }
3087 } else {
3088 // Register allocator doesn't create partial overlap.
3089 DCHECK(!o_l.Is(high));
3090 DCHECK(!o_h.Is(low));
3091 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
3092 uint32_t shift_value = cst & kMaxLongShiftDistance;
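// (e.g. a constant long Shl by 40 takes the `shift_value > 32` path below:
// o_h = low << 8 and o_l = 0.)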
3093 if (shift_value > 32) {
3094 if (op->IsShl()) {
3095 __ Lsl(o_h, low, shift_value - 32);
3096 __ Mov(o_l, 0);
3097 } else if (op->IsShr()) {
3098 __ Asr(o_l, high, shift_value - 32);
3099 __ Asr(o_h, high, 31);
3100 } else {
3101 __ Lsr(o_l, high, shift_value - 32);
3102 __ Mov(o_h, 0);
3103 }
3104 } else if (shift_value == 32) {
3105 if (op->IsShl()) {
3106 __ Mov(o_h, low);
3107 __ Mov(o_l, 0);
3108 } else if (op->IsShr()) {
3109 __ Mov(o_l, high);
3110 __ Asr(o_h, high, 31);
3111 } else {
3112 __ Mov(o_l, high);
3113 __ Mov(o_h, 0);
3114 }
3115 } else if (shift_value == 1) {
3116 if (op->IsShl()) {
3117 __ Lsls(o_l, low, 1);
3118 __ Adc(o_h, high, high);
3119 } else if (op->IsShr()) {
3120 __ Asrs(o_h, high, 1);
3121 __ Rrx(o_l, low);
3122 } else {
3123 __ Lsrs(o_h, high, 1);
3124 __ Rrx(o_l, low);
3125 }
3126 } else {
3127 DCHECK(2 <= shift_value && shift_value < 32) << shift_value;
3128 if (op->IsShl()) {
3129 __ Lsl(o_h, high, shift_value);
3130 __ Orr(o_h, o_h, Operand(low, ShiftType::LSR, 32 - shift_value));
3131 __ Lsl(o_l, low, shift_value);
3132 } else if (op->IsShr()) {
3133 __ Lsr(o_l, low, shift_value);
3134 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
3135 __ Asr(o_h, high, shift_value);
3136 } else {
3137 __ Lsr(o_l, low, shift_value);
3138 __ Orr(o_l, o_l, Operand(high, ShiftType::LSL, 32 - shift_value));
3139 __ Lsr(o_h, high, shift_value);
3140 }
3141 }
3142 }
3143 break;
3144 }
3145 default:
3146 LOG(FATAL) << "Unexpected operation type " << type;
3147 UNREACHABLE();
3148 }
3149}
3150
3151void LocationsBuilderARMVIXL::VisitShl(HShl* shl) {
3152 HandleShift(shl);
3153}
3154
3155void InstructionCodeGeneratorARMVIXL::VisitShl(HShl* shl) {
3156 HandleShift(shl);
3157}
3158
3159void LocationsBuilderARMVIXL::VisitShr(HShr* shr) {
3160 HandleShift(shr);
3161}
3162
3163void InstructionCodeGeneratorARMVIXL::VisitShr(HShr* shr) {
3164 HandleShift(shr);
3165}
3166
3167void LocationsBuilderARMVIXL::VisitUShr(HUShr* ushr) {
3168 HandleShift(ushr);
3169}
3170
3171void InstructionCodeGeneratorARMVIXL::VisitUShr(HUShr* ushr) {
3172 HandleShift(ushr);
3173}
3174
3175void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
3176 LocationSummary* locations =
3177 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3178 if (instruction->IsStringAlloc()) {
3179 locations->AddTemp(LocationFrom(kMethodRegister));
3180 } else {
3181 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3182 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
3183 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
3184 }
3185 locations->SetOut(LocationFrom(r0));
3186}
3187
3188void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
 3189 // Note: if heap poisoning is enabled, the entry point takes care
3190 // of poisoning the reference.
3191 if (instruction->IsStringAlloc()) {
3192 // String is allocated through StringFactory. Call NewEmptyString entry point.
3193 vixl32::Register temp = RegisterFrom(instruction->GetLocations()->GetTemp(0));
3194 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize);
3195 GetAssembler()->LoadFromOffset(kLoadWord, temp, tr, QUICK_ENTRY_POINT(pNewEmptyString));
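// temp now holds the ArtMethod* for NewEmptyString; the next load fetches its quick-code
// entry point into lr for the blx below.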
3196 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, code_offset.Int32Value());
3197 AssemblerAccurateScope aas(GetVIXLAssembler(),
3198 kArmInstrMaxSizeInBytes,
3199 CodeBufferCheckScope::kMaximumSize);
3200 __ blx(lr);
3201 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3202 } else {
3203 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
3204 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3205 }
3206}
3207
3208void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
3209 LocationSummary* locations =
3210 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3211 InvokeRuntimeCallingConventionARMVIXL calling_convention;
3212 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
3213 locations->SetOut(LocationFrom(r0));
3214 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
3215 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
3216}
3217
3218void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
3219 InvokeRuntimeCallingConventionARMVIXL calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -08003220 __ Mov(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex().index_);
Artem Serov02d37832016-10-25 15:25:33 +01003221 // Note: if heap poisoning is enabled, the entry point takes care
3222 // of poisoning the reference.
3223 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
3224 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
3225}
3226
3227void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
3228 LocationSummary* locations =
3229 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3230 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3231 if (location.IsStackSlot()) {
3232 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3233 } else if (location.IsDoubleStackSlot()) {
3234 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3235 }
3236 locations->SetOut(location);
3237}
3238
3239void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
3240 HParameterValue* instruction ATTRIBUTE_UNUSED) {
3241 // Nothing to do, the parameter is already at its location.
3242}
3243
3244void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) {
3245 LocationSummary* locations =
3246 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3247 locations->SetOut(LocationFrom(kMethodRegister));
3248}
3249
3250void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
3251 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3252 // Nothing to do, the method is already at its location.
3253}
3254
3255void LocationsBuilderARMVIXL::VisitNot(HNot* not_) {
3256 LocationSummary* locations =
3257 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
3258 locations->SetInAt(0, Location::RequiresRegister());
3259 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3260}
3261
3262void InstructionCodeGeneratorARMVIXL::VisitNot(HNot* not_) {
3263 LocationSummary* locations = not_->GetLocations();
3264 Location out = locations->Out();
3265 Location in = locations->InAt(0);
3266 switch (not_->GetResultType()) {
3267 case Primitive::kPrimInt:
3268 __ Mvn(OutputRegister(not_), InputRegisterAt(not_, 0));
3269 break;
3270
3271 case Primitive::kPrimLong:
3272 __ Mvn(LowRegisterFrom(out), LowRegisterFrom(in));
3273 __ Mvn(HighRegisterFrom(out), HighRegisterFrom(in));
3274 break;
3275
3276 default:
3277 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3278 }
3279}
3280
Scott Wakelingc34dba72016-10-03 10:14:44 +01003281void LocationsBuilderARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
3282 LocationSummary* locations =
3283 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
3284 locations->SetInAt(0, Location::RequiresRegister());
3285 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3286}
3287
3288void InstructionCodeGeneratorARMVIXL::VisitBooleanNot(HBooleanNot* bool_not) {
3289 __ Eor(OutputRegister(bool_not), InputRegister(bool_not), 1);
3290}
3291
Artem Serov02d37832016-10-25 15:25:33 +01003292void LocationsBuilderARMVIXL::VisitCompare(HCompare* compare) {
3293 LocationSummary* locations =
3294 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
3295 switch (compare->InputAt(0)->GetType()) {
3296 case Primitive::kPrimBoolean:
3297 case Primitive::kPrimByte:
3298 case Primitive::kPrimShort:
3299 case Primitive::kPrimChar:
3300 case Primitive::kPrimInt:
3301 case Primitive::kPrimLong: {
3302 locations->SetInAt(0, Location::RequiresRegister());
3303 locations->SetInAt(1, Location::RequiresRegister());
3304 // Output overlaps because it is written before doing the low comparison.
3305 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3306 break;
3307 }
3308 case Primitive::kPrimFloat:
3309 case Primitive::kPrimDouble: {
3310 locations->SetInAt(0, Location::RequiresFpuRegister());
3311 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1)));
3312 locations->SetOut(Location::RequiresRegister());
3313 break;
3314 }
3315 default:
3316 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
3317 }
3318}
3319
3320void InstructionCodeGeneratorARMVIXL::VisitCompare(HCompare* compare) {
3321 LocationSummary* locations = compare->GetLocations();
3322 vixl32::Register out = OutputRegister(compare);
3323 Location left = locations->InAt(0);
3324 Location right = locations->InAt(1);
3325
3326 vixl32::Label less, greater, done;
3327 Primitive::Type type = compare->InputAt(0)->GetType();
3328 vixl32::Condition less_cond = vixl32::Condition(kNone);
3329 switch (type) {
3330 case Primitive::kPrimBoolean:
3331 case Primitive::kPrimByte:
3332 case Primitive::kPrimShort:
3333 case Primitive::kPrimChar:
3334 case Primitive::kPrimInt: {
3335 // Emit move to `out` before the `Cmp`, as `Mov` might affect the status flags.
3336 __ Mov(out, 0);
3337 __ Cmp(RegisterFrom(left), RegisterFrom(right)); // Signed compare.
3338 less_cond = lt;
3339 break;
3340 }
3341 case Primitive::kPrimLong: {
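// Compare the high words first (signed); only when they are equal does the unsigned
// compare of the low words below decide the result.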
3342 __ Cmp(HighRegisterFrom(left), HighRegisterFrom(right)); // Signed compare.
3343 __ B(lt, &less);
3344 __ B(gt, &greater);
3345 // Emit move to `out` before the last `Cmp`, as `Mov` might affect the status flags.
3346 __ Mov(out, 0);
3347 __ Cmp(LowRegisterFrom(left), LowRegisterFrom(right)); // Unsigned compare.
3348 less_cond = lo;
3349 break;
3350 }
3351 case Primitive::kPrimFloat:
3352 case Primitive::kPrimDouble: {
3353 __ Mov(out, 0);
3354 GenerateVcmp(compare);
3355 // To branch on the FP compare result we transfer FPSCR to APSR (encoded as PC in VMRS).
3356 __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
3357 less_cond = ARMFPCondition(kCondLT, compare->IsGtBias());
3358 break;
3359 }
3360 default:
3361 LOG(FATAL) << "Unexpected compare type " << type;
3362 UNREACHABLE();
3363 }
3364
3365 __ B(eq, &done);
3366 __ B(less_cond, &less);
3367
3368 __ Bind(&greater);
3369 __ Mov(out, 1);
3370 __ B(&done);
3371
3372 __ Bind(&less);
3373 __ Mov(out, -1);
3374
3375 __ Bind(&done);
3376}
3377
3378void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) {
3379 LocationSummary* locations =
3380 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3381 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
3382 locations->SetInAt(i, Location::Any());
3383 }
3384 locations->SetOut(Location::Any());
3385}
3386
3387void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
3388 LOG(FATAL) << "Unreachable";
3389}
3390
3391void CodeGeneratorARMVIXL::GenerateMemoryBarrier(MemBarrierKind kind) {
3392 // TODO (ported from quick): revisit ARM barrier kinds.
3393 DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings.
3394 switch (kind) {
3395 case MemBarrierKind::kAnyStore:
3396 case MemBarrierKind::kLoadAny:
3397 case MemBarrierKind::kAnyAny: {
3398 flavor = DmbOptions::ISH;
3399 break;
3400 }
3401 case MemBarrierKind::kStoreStore: {
3402 flavor = DmbOptions::ISHST;
3403 break;
3404 }
3405 default:
3406 LOG(FATAL) << "Unexpected memory barrier " << kind;
3407 }
3408 __ Dmb(flavor);
3409}
3410
3411void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicLoad(vixl32::Register addr,
3412 uint32_t offset,
3413 vixl32::Register out_lo,
3414 vixl32::Register out_hi) {
3415 UseScratchRegisterScope temps(GetVIXLAssembler());
3416 if (offset != 0) {
3417 vixl32::Register temp = temps.Acquire();
3418 __ Add(temp, addr, offset);
3419 addr = temp;
3420 }
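// On ARMv7 a 64-bit exclusive load is single-copy atomic, so a lone Ldrexd (without a
// matching Strexd) is enough to read the value atomically.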
3421 __ Ldrexd(out_lo, out_hi, addr);
3422}
3423
3424void InstructionCodeGeneratorARMVIXL::GenerateWideAtomicStore(vixl32::Register addr,
3425 uint32_t offset,
3426 vixl32::Register value_lo,
3427 vixl32::Register value_hi,
3428 vixl32::Register temp1,
3429 vixl32::Register temp2,
3430 HInstruction* instruction) {
3431 UseScratchRegisterScope temps(GetVIXLAssembler());
3432 vixl32::Label fail;
3433 if (offset != 0) {
3434 vixl32::Register temp = temps.Acquire();
3435 __ Add(temp, addr, offset);
3436 addr = temp;
3437 }
3438 __ Bind(&fail);
 3439 // We need a load followed by a store. (The address used in a STREX instruction must
3440 // be the same as the address in the most recently executed LDREX instruction.)
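// If the Strexd fails (e.g. another thread wrote to the location in between), temp1 is
// non-zero and the Cbnz below retries the whole ldrexd/strexd sequence.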
3441 __ Ldrexd(temp1, temp2, addr);
3442 codegen_->MaybeRecordImplicitNullCheck(instruction);
3443 __ Strexd(temp1, value_lo, value_hi, addr);
3444 __ Cbnz(temp1, &fail);
3445}
Artem Serov02109dd2016-09-23 17:17:54 +01003446
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003447void LocationsBuilderARMVIXL::HandleFieldSet(
3448 HInstruction* instruction, const FieldInfo& field_info) {
3449 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
3450
3451 LocationSummary* locations =
3452 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3453 locations->SetInAt(0, Location::RequiresRegister());
3454
3455 Primitive::Type field_type = field_info.GetFieldType();
3456 if (Primitive::IsFloatingPointType(field_type)) {
3457 locations->SetInAt(1, Location::RequiresFpuRegister());
3458 } else {
3459 locations->SetInAt(1, Location::RequiresRegister());
3460 }
3461
3462 bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
3463 bool generate_volatile = field_info.IsVolatile()
3464 && is_wide
3465 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3466 bool needs_write_barrier =
3467 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3468 // Temporary registers for the write barrier.
3469 // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
3470 if (needs_write_barrier) {
3471 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
3472 locations->AddTemp(Location::RequiresRegister());
3473 } else if (generate_volatile) {
 3474 // The ARM encoding has some additional constraints for ldrexd/strexd:
3475 // - registers need to be consecutive
3476 // - the first register should be even but not R14.
3477 // We don't test for ARM yet, and the assertion makes sure that we
3478 // revisit this if we ever enable ARM encoding.
3479 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
3480
3481 locations->AddTemp(Location::RequiresRegister());
3482 locations->AddTemp(Location::RequiresRegister());
3483 if (field_type == Primitive::kPrimDouble) {
3484 // For doubles we need two more registers to copy the value.
3485 locations->AddTemp(LocationFrom(r2));
3486 locations->AddTemp(LocationFrom(r3));
3487 }
3488 }
3489}
3490
3491void InstructionCodeGeneratorARMVIXL::HandleFieldSet(HInstruction* instruction,
3492 const FieldInfo& field_info,
3493 bool value_can_be_null) {
3494 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
3495
3496 LocationSummary* locations = instruction->GetLocations();
3497 vixl32::Register base = InputRegisterAt(instruction, 0);
3498 Location value = locations->InAt(1);
3499
3500 bool is_volatile = field_info.IsVolatile();
3501 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3502 Primitive::Type field_type = field_info.GetFieldType();
3503 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
3504 bool needs_write_barrier =
3505 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3506
3507 if (is_volatile) {
3508 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
3509 }
3510
3511 switch (field_type) {
3512 case Primitive::kPrimBoolean:
3513 case Primitive::kPrimByte: {
3514 GetAssembler()->StoreToOffset(kStoreByte, RegisterFrom(value), base, offset);
3515 break;
3516 }
3517
3518 case Primitive::kPrimShort:
3519 case Primitive::kPrimChar: {
3520 GetAssembler()->StoreToOffset(kStoreHalfword, RegisterFrom(value), base, offset);
3521 break;
3522 }
3523
3524 case Primitive::kPrimInt:
3525 case Primitive::kPrimNot: {
3526 if (kPoisonHeapReferences && needs_write_barrier) {
3527 // Note that in the case where `value` is a null reference,
3528 // we do not enter this block, as a null reference does not
3529 // need poisoning.
3530 DCHECK_EQ(field_type, Primitive::kPrimNot);
3531 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3532 __ Mov(temp, RegisterFrom(value));
3533 GetAssembler()->PoisonHeapReference(temp);
3534 GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset);
3535 } else {
3536 GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset);
3537 }
3538 break;
3539 }
3540
3541 case Primitive::kPrimLong: {
3542 if (is_volatile && !atomic_ldrd_strd) {
3543 GenerateWideAtomicStore(base,
3544 offset,
3545 LowRegisterFrom(value),
3546 HighRegisterFrom(value),
3547 RegisterFrom(locations->GetTemp(0)),
3548 RegisterFrom(locations->GetTemp(1)),
3549 instruction);
3550 } else {
3551 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
3552 codegen_->MaybeRecordImplicitNullCheck(instruction);
3553 }
3554 break;
3555 }
3556
3557 case Primitive::kPrimFloat: {
3558 GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
3559 break;
3560 }
3561
3562 case Primitive::kPrimDouble: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01003563 vixl32::DRegister value_reg = DRegisterFrom(value);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003564 if (is_volatile && !atomic_ldrd_strd) {
3565 vixl32::Register value_reg_lo = RegisterFrom(locations->GetTemp(0));
3566 vixl32::Register value_reg_hi = RegisterFrom(locations->GetTemp(1));
3567
3568 __ Vmov(value_reg_lo, value_reg_hi, value_reg);
3569
3570 GenerateWideAtomicStore(base,
3571 offset,
3572 value_reg_lo,
3573 value_reg_hi,
3574 RegisterFrom(locations->GetTemp(2)),
3575 RegisterFrom(locations->GetTemp(3)),
3576 instruction);
3577 } else {
3578 GetAssembler()->StoreDToOffset(value_reg, base, offset);
3579 codegen_->MaybeRecordImplicitNullCheck(instruction);
3580 }
3581 break;
3582 }
3583
3584 case Primitive::kPrimVoid:
3585 LOG(FATAL) << "Unreachable type " << field_type;
3586 UNREACHABLE();
3587 }
3588
3589 // Longs and doubles are handled in the switch.
3590 if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
3591 codegen_->MaybeRecordImplicitNullCheck(instruction);
3592 }
3593
3594 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
3595 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
3596 vixl32::Register card = RegisterFrom(locations->GetTemp(1));
3597 codegen_->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null);
3598 }
3599
3600 if (is_volatile) {
3601 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
3602 }
3603}
3604
Artem Serov02d37832016-10-25 15:25:33 +01003605void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
3606 const FieldInfo& field_info) {
3607 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
3608
3609 bool object_field_get_with_read_barrier =
3610 kEmitCompilerReadBarrier && (field_info.GetFieldType() == Primitive::kPrimNot);
3611 LocationSummary* locations =
3612 new (GetGraph()->GetArena()) LocationSummary(instruction,
3613 object_field_get_with_read_barrier ?
3614 LocationSummary::kCallOnSlowPath :
3615 LocationSummary::kNoCall);
3616 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
3617 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
3618 }
3619 locations->SetInAt(0, Location::RequiresRegister());
3620
3621 bool volatile_for_double = field_info.IsVolatile()
3622 && (field_info.GetFieldType() == Primitive::kPrimDouble)
3623 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
 3624 // The output overlaps in the case of a volatile long: we don't want the
3625 // code generated by GenerateWideAtomicLoad to overwrite the
3626 // object's location. Likewise, in the case of an object field get
3627 // with read barriers enabled, we do not want the load to overwrite
3628 // the object's location, as we need it to emit the read barrier.
3629 bool overlap = (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) ||
3630 object_field_get_with_read_barrier;
3631
3632 if (Primitive::IsFloatingPointType(instruction->GetType())) {
3633 locations->SetOut(Location::RequiresFpuRegister());
3634 } else {
3635 locations->SetOut(Location::RequiresRegister(),
3636 (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
3637 }
3638 if (volatile_for_double) {
 3639 // The ARM encoding has some additional constraints for ldrexd/strexd:
3640 // - registers need to be consecutive
3641 // - the first register should be even but not R14.
3642 // We don't test for ARM yet, and the assertion makes sure that we
3643 // revisit this if we ever enable ARM encoding.
3644 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
3645 locations->AddTemp(Location::RequiresRegister());
3646 locations->AddTemp(Location::RequiresRegister());
3647 } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
3648 // We need a temporary register for the read barrier marking slow
3649 // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier.
3650 locations->AddTemp(Location::RequiresRegister());
3651 }
3652}
3653
3654Location LocationsBuilderARMVIXL::ArithmeticZeroOrFpuRegister(HInstruction* input) {
3655 DCHECK(Primitive::IsFloatingPointType(input->GetType())) << input->GetType();
3656 if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) ||
3657 (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) {
3658 return Location::ConstantLocation(input->AsConstant());
3659 } else {
3660 return Location::RequiresFpuRegister();
3661 }
3662}
3663
Artem Serov02109dd2016-09-23 17:17:54 +01003664Location LocationsBuilderARMVIXL::ArmEncodableConstantOrRegister(HInstruction* constant,
3665 Opcode opcode) {
3666 DCHECK(!Primitive::IsFloatingPointType(constant->GetType()));
3667 if (constant->IsConstant() &&
3668 CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) {
3669 return Location::ConstantLocation(constant->AsConstant());
3670 }
3671 return Location::RequiresRegister();
3672}
3673
3674bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(HConstant* input_cst,
3675 Opcode opcode) {
3676 uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst));
3677 if (Primitive::Is64BitType(input_cst->GetType())) {
3678 Opcode high_opcode = opcode;
3679 SetCc low_set_cc = kCcDontCare;
3680 switch (opcode) {
3681 case SUB:
3682 // Flip the operation to an ADD.
3683 value = -value;
3684 opcode = ADD;
3685 FALLTHROUGH_INTENDED;
3686 case ADD:
3687 if (Low32Bits(value) == 0u) {
3688 return CanEncodeConstantAsImmediate(High32Bits(value), opcode, kCcDontCare);
3689 }
3690 high_opcode = ADC;
3691 low_set_cc = kCcSet;
3692 break;
3693 default:
3694 break;
3695 }
3696 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode, low_set_cc) &&
3697 CanEncodeConstantAsImmediate(High32Bits(value), high_opcode, kCcDontCare);
3698 } else {
3699 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode);
3700 }
3701}
3702
3703// TODO(VIXL): Replace art::arm::SetCc` with `vixl32::FlagsUpdate after flags set optimization
3704// enabled.
3705bool LocationsBuilderARMVIXL::CanEncodeConstantAsImmediate(uint32_t value,
3706 Opcode opcode,
3707 SetCc set_cc) {
3708 ArmVIXLAssembler* assembler = codegen_->GetAssembler();
3709 if (assembler->ShifterOperandCanHold(opcode, value, set_cc)) {
3710 return true;
3711 }
3712 Opcode neg_opcode = kNoOperand;
3713 switch (opcode) {
3714 case AND: neg_opcode = BIC; value = ~value; break;
3715 case ORR: neg_opcode = ORN; value = ~value; break;
3716 case ADD: neg_opcode = SUB; value = -value; break;
3717 case ADC: neg_opcode = SBC; value = ~value; break;
3718 case SUB: neg_opcode = ADD; value = -value; break;
3719 case SBC: neg_opcode = ADC; value = ~value; break;
3720 default:
3721 return false;
3722 }
3723 return assembler->ShifterOperandCanHold(neg_opcode, value, set_cc);
3724}
3725
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003726void InstructionCodeGeneratorARMVIXL::HandleFieldGet(HInstruction* instruction,
3727 const FieldInfo& field_info) {
3728 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
3729
3730 LocationSummary* locations = instruction->GetLocations();
3731 vixl32::Register base = InputRegisterAt(instruction, 0);
3732 Location out = locations->Out();
3733 bool is_volatile = field_info.IsVolatile();
3734 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
3735 Primitive::Type field_type = field_info.GetFieldType();
3736 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
3737
3738 switch (field_type) {
3739 case Primitive::kPrimBoolean:
3740 GetAssembler()->LoadFromOffset(kLoadUnsignedByte, RegisterFrom(out), base, offset);
3741 break;
3742
3743 case Primitive::kPrimByte:
3744 GetAssembler()->LoadFromOffset(kLoadSignedByte, RegisterFrom(out), base, offset);
3745 break;
3746
3747 case Primitive::kPrimShort:
3748 GetAssembler()->LoadFromOffset(kLoadSignedHalfword, RegisterFrom(out), base, offset);
3749 break;
3750
3751 case Primitive::kPrimChar:
3752 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, RegisterFrom(out), base, offset);
3753 break;
3754
3755 case Primitive::kPrimInt:
3756 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
3757 break;
3758
3759 case Primitive::kPrimNot: {
3760 // /* HeapReference<Object> */ out = *(base + offset)
3761 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3762 TODO_VIXL32(FATAL);
3763 } else {
3764 GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
3765 // TODO(VIXL): Scope to guarantee the position immediately after the load.
3766 codegen_->MaybeRecordImplicitNullCheck(instruction);
3767 if (is_volatile) {
3768 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3769 }
3770 // If read barriers are enabled, emit read barriers other than
3771 // Baker's using a slow path (and also unpoison the loaded
3772 // reference, if heap poisoning is enabled).
3773 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, locations->InAt(0), offset);
3774 }
3775 break;
3776 }
3777
3778 case Primitive::kPrimLong:
3779 if (is_volatile && !atomic_ldrd_strd) {
3780 GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
3781 } else {
3782 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
3783 }
3784 break;
3785
3786 case Primitive::kPrimFloat:
3787 GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
3788 break;
3789
3790 case Primitive::kPrimDouble: {
Scott Wakelingc34dba72016-10-03 10:14:44 +01003791 vixl32::DRegister out_dreg = DRegisterFrom(out);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003792 if (is_volatile && !atomic_ldrd_strd) {
3793 vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
3794 vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
3795 GenerateWideAtomicLoad(base, offset, lo, hi);
3796 // TODO(VIXL): Do we need to be immediately after the ldrexd instruction? If so we need a
3797 // scope.
3798 codegen_->MaybeRecordImplicitNullCheck(instruction);
3799 __ Vmov(out_dreg, lo, hi);
3800 } else {
3801 GetAssembler()->LoadDFromOffset(out_dreg, base, offset);
3802 // TODO(VIXL): Scope to guarantee the position immediately after the load.
3803 codegen_->MaybeRecordImplicitNullCheck(instruction);
3804 }
3805 break;
3806 }
3807
3808 case Primitive::kPrimVoid:
3809 LOG(FATAL) << "Unreachable type " << field_type;
3810 UNREACHABLE();
3811 }
3812
3813 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimDouble) {
3814 // Potential implicit null checks, in the case of reference or
3815 // double fields, are handled in the previous switch statement.
3816 } else {
3817 // Address cases other than reference and double that may require an implicit null check.
3818 codegen_->MaybeRecordImplicitNullCheck(instruction);
3819 }
3820
3821 if (is_volatile) {
3822 if (field_type == Primitive::kPrimNot) {
3823 // Memory barriers, in the case of references, are also handled
3824 // in the previous switch statement.
3825 } else {
3826 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3827 }
3828 }
3829}
3830
3831void LocationsBuilderARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3832 HandleFieldSet(instruction, instruction->GetFieldInfo());
3833}
3834
3835void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3836 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3837}
3838
3839void LocationsBuilderARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3840 HandleFieldGet(instruction, instruction->GetFieldInfo());
3841}
3842
3843void InstructionCodeGeneratorARMVIXL::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3844 HandleFieldGet(instruction, instruction->GetFieldInfo());
3845}
3846
3847void LocationsBuilderARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3848 HandleFieldGet(instruction, instruction->GetFieldInfo());
3849}
3850
3851void InstructionCodeGeneratorARMVIXL::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3852 HandleFieldGet(instruction, instruction->GetFieldInfo());
3853}
3854
Scott Wakelingc34dba72016-10-03 10:14:44 +01003855void LocationsBuilderARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3856 HandleFieldSet(instruction, instruction->GetFieldInfo());
3857}
3858
3859void InstructionCodeGeneratorARMVIXL::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3860 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3861}
3862
Artem Serovcfbe9132016-10-14 15:58:56 +01003863void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldGet(
3864 HUnresolvedInstanceFieldGet* instruction) {
3865 FieldAccessCallingConventionARMVIXL calling_convention;
3866 codegen_->CreateUnresolvedFieldLocationSummary(
3867 instruction, instruction->GetFieldType(), calling_convention);
3868}
3869
3870void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldGet(
3871 HUnresolvedInstanceFieldGet* instruction) {
3872 FieldAccessCallingConventionARMVIXL calling_convention;
3873 codegen_->GenerateUnresolvedFieldAccess(instruction,
3874 instruction->GetFieldType(),
3875 instruction->GetFieldIndex(),
3876 instruction->GetDexPc(),
3877 calling_convention);
3878}
3879
3880void LocationsBuilderARMVIXL::VisitUnresolvedInstanceFieldSet(
3881 HUnresolvedInstanceFieldSet* instruction) {
3882 FieldAccessCallingConventionARMVIXL calling_convention;
3883 codegen_->CreateUnresolvedFieldLocationSummary(
3884 instruction, instruction->GetFieldType(), calling_convention);
3885}
3886
3887void InstructionCodeGeneratorARMVIXL::VisitUnresolvedInstanceFieldSet(
3888 HUnresolvedInstanceFieldSet* instruction) {
3889 FieldAccessCallingConventionARMVIXL calling_convention;
3890 codegen_->GenerateUnresolvedFieldAccess(instruction,
3891 instruction->GetFieldType(),
3892 instruction->GetFieldIndex(),
3893 instruction->GetDexPc(),
3894 calling_convention);
3895}
3896
3897void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldGet(
3898 HUnresolvedStaticFieldGet* instruction) {
3899 FieldAccessCallingConventionARMVIXL calling_convention;
3900 codegen_->CreateUnresolvedFieldLocationSummary(
3901 instruction, instruction->GetFieldType(), calling_convention);
3902}
3903
3904void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldGet(
3905 HUnresolvedStaticFieldGet* instruction) {
3906 FieldAccessCallingConventionARMVIXL calling_convention;
3907 codegen_->GenerateUnresolvedFieldAccess(instruction,
3908 instruction->GetFieldType(),
3909 instruction->GetFieldIndex(),
3910 instruction->GetDexPc(),
3911 calling_convention);
3912}
3913
3914void LocationsBuilderARMVIXL::VisitUnresolvedStaticFieldSet(
3915 HUnresolvedStaticFieldSet* instruction) {
3916 FieldAccessCallingConventionARMVIXL calling_convention;
3917 codegen_->CreateUnresolvedFieldLocationSummary(
3918 instruction, instruction->GetFieldType(), calling_convention);
3919}
3920
3921void InstructionCodeGeneratorARMVIXL::VisitUnresolvedStaticFieldSet(
3922 HUnresolvedStaticFieldSet* instruction) {
3923 FieldAccessCallingConventionARMVIXL calling_convention;
3924 codegen_->GenerateUnresolvedFieldAccess(instruction,
3925 instruction->GetFieldType(),
3926 instruction->GetFieldIndex(),
3927 instruction->GetDexPc(),
3928 calling_convention);
3929}
3930
Scott Wakelinga7812ae2016-10-17 10:03:36 +01003931void LocationsBuilderARMVIXL::VisitNullCheck(HNullCheck* instruction) {
3932 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/
3933 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3934 ? LocationSummary::kCallOnSlowPath
3935 : LocationSummary::kNoCall;
3936 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3937 locations->SetInAt(0, Location::RequiresRegister());
3938 if (instruction->HasUses()) {
3939 locations->SetOut(Location::SameAsFirstInput());
3940 }
3941}
3942
3943void CodeGeneratorARMVIXL::GenerateImplicitNullCheck(HNullCheck* instruction) {
3944 if (CanMoveNullCheckToUser(instruction)) {
3945 return;
3946 }
3947
3948 UseScratchRegisterScope temps(GetVIXLAssembler());
3949 AssemblerAccurateScope aas(GetVIXLAssembler(),
3950 kArmInstrMaxSizeInBytes,
3951 CodeBufferCheckScope::kMaximumSize);
3952 __ ldr(temps.Acquire(), MemOperand(InputRegisterAt(instruction, 0)));
3953 RecordPcInfo(instruction, instruction->GetDexPc());
3954}
3955
3956void CodeGeneratorARMVIXL::GenerateExplicitNullCheck(HNullCheck* instruction) {
3957 NullCheckSlowPathARMVIXL* slow_path =
3958 new (GetGraph()->GetArena()) NullCheckSlowPathARMVIXL(instruction);
3959 AddSlowPath(slow_path);
3960 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3961}
3962
3963void InstructionCodeGeneratorARMVIXL::VisitNullCheck(HNullCheck* instruction) {
3964 codegen_->GenerateNullCheck(instruction);
3965}
3966
Scott Wakelingc34dba72016-10-03 10:14:44 +01003967static LoadOperandType GetLoadOperandType(Primitive::Type type) {
3968 switch (type) {
3969 case Primitive::kPrimNot:
3970 return kLoadWord;
3971 case Primitive::kPrimBoolean:
3972 return kLoadUnsignedByte;
3973 case Primitive::kPrimByte:
3974 return kLoadSignedByte;
3975 case Primitive::kPrimChar:
3976 return kLoadUnsignedHalfword;
3977 case Primitive::kPrimShort:
3978 return kLoadSignedHalfword;
3979 case Primitive::kPrimInt:
3980 return kLoadWord;
3981 case Primitive::kPrimLong:
3982 return kLoadWordPair;
3983 case Primitive::kPrimFloat:
3984 return kLoadSWord;
3985 case Primitive::kPrimDouble:
3986 return kLoadDWord;
3987 default:
3988 LOG(FATAL) << "Unreachable type " << type;
3989 UNREACHABLE();
3990 }
3991}
3992
3993static StoreOperandType GetStoreOperandType(Primitive::Type type) {
3994 switch (type) {
3995 case Primitive::kPrimNot:
3996 return kStoreWord;
3997 case Primitive::kPrimBoolean:
3998 case Primitive::kPrimByte:
3999 return kStoreByte;
4000 case Primitive::kPrimChar:
4001 case Primitive::kPrimShort:
4002 return kStoreHalfword;
4003 case Primitive::kPrimInt:
4004 return kStoreWord;
4005 case Primitive::kPrimLong:
4006 return kStoreWordPair;
4007 case Primitive::kPrimFloat:
4008 return kStoreSWord;
4009 case Primitive::kPrimDouble:
4010 return kStoreDWord;
4011 default:
4012 LOG(FATAL) << "Unreachable type " << type;
4013 UNREACHABLE();
4014 }
4015}
4016
4017void CodeGeneratorARMVIXL::LoadFromShiftedRegOffset(Primitive::Type type,
4018 Location out_loc,
4019 vixl32::Register base,
4020 vixl32::Register reg_index,
4021 vixl32::Condition cond) {
4022 uint32_t shift_count = Primitive::ComponentSizeShift(type);
4023 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
4024
4025 switch (type) {
4026 case Primitive::kPrimByte:
4027 __ Ldrsb(cond, RegisterFrom(out_loc), mem_address);
4028 break;
4029 case Primitive::kPrimBoolean:
4030 __ Ldrb(cond, RegisterFrom(out_loc), mem_address);
4031 break;
4032 case Primitive::kPrimShort:
4033 __ Ldrsh(cond, RegisterFrom(out_loc), mem_address);
4034 break;
4035 case Primitive::kPrimChar:
4036 __ Ldrh(cond, RegisterFrom(out_loc), mem_address);
4037 break;
4038 case Primitive::kPrimNot:
4039 case Primitive::kPrimInt:
4040 __ Ldr(cond, RegisterFrom(out_loc), mem_address);
4041 break;
4042 // T32 doesn't support LoadFromShiftedRegOffset mem address mode for these types.
4043 case Primitive::kPrimLong:
4044 case Primitive::kPrimFloat:
4045 case Primitive::kPrimDouble:
4046 default:
4047 LOG(FATAL) << "Unreachable type " << type;
4048 UNREACHABLE();
4049 }
4050}
4051
4052void CodeGeneratorARMVIXL::StoreToShiftedRegOffset(Primitive::Type type,
4053 Location loc,
4054 vixl32::Register base,
4055 vixl32::Register reg_index,
4056 vixl32::Condition cond) {
4057 uint32_t shift_count = Primitive::ComponentSizeShift(type);
4058 MemOperand mem_address(base, reg_index, vixl32::LSL, shift_count);
4059
4060 switch (type) {
4061 case Primitive::kPrimByte:
4062 case Primitive::kPrimBoolean:
4063 __ Strb(cond, RegisterFrom(loc), mem_address);
4064 break;
4065 case Primitive::kPrimShort:
4066 case Primitive::kPrimChar:
4067 __ Strh(cond, RegisterFrom(loc), mem_address);
4068 break;
4069 case Primitive::kPrimNot:
4070 case Primitive::kPrimInt:
4071 __ Str(cond, RegisterFrom(loc), mem_address);
4072 break;
4073 // T32 doesn't support StoreToShiftedRegOffset mem address mode for these types.
4074 case Primitive::kPrimLong:
4075 case Primitive::kPrimFloat:
4076 case Primitive::kPrimDouble:
4077 default:
4078 LOG(FATAL) << "Unreachable type " << type;
4079 UNREACHABLE();
4080 }
4081}
4082
4083void LocationsBuilderARMVIXL::VisitArrayGet(HArrayGet* instruction) {
4084 bool object_array_get_with_read_barrier =
4085 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
4086 LocationSummary* locations =
4087 new (GetGraph()->GetArena()) LocationSummary(instruction,
4088 object_array_get_with_read_barrier ?
4089 LocationSummary::kCallOnSlowPath :
4090 LocationSummary::kNoCall);
4091 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4092 TODO_VIXL32(FATAL);
4093 }
4094 locations->SetInAt(0, Location::RequiresRegister());
4095 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4096 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4097 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4098 } else {
4099 // The output overlaps in the case of an object array get with
4100 // read barriers enabled: we do not want the move to overwrite the
4101 // array's location, as we need it to emit the read barrier.
4102 locations->SetOut(
4103 Location::RequiresRegister(),
4104 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
4105 }
4106 // We need a temporary register for the read barrier marking slow
4107 // path in CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier.
 4108 // It is also needed for the String compression feature.
4109 if ((object_array_get_with_read_barrier && kUseBakerReadBarrier)
4110 || (mirror::kUseStringCompression && instruction->IsStringCharAt())) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004111 locations->AddTemp(Location::RequiresRegister());
Scott Wakelingc34dba72016-10-03 10:14:44 +01004112 }
4113}
4114
4115void InstructionCodeGeneratorARMVIXL::VisitArrayGet(HArrayGet* instruction) {
4116 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4117 LocationSummary* locations = instruction->GetLocations();
4118 Location obj_loc = locations->InAt(0);
4119 vixl32::Register obj = InputRegisterAt(instruction, 0);
4120 Location index = locations->InAt(1);
4121 Location out_loc = locations->Out();
4122 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
4123 Primitive::Type type = instruction->GetType();
4124 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
4125 instruction->IsStringCharAt();
4126 HInstruction* array_instr = instruction->GetArray();
4127 bool has_intermediate_address = array_instr->IsIntermediateAddress();
4128 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
4129 DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));
4130
4131 switch (type) {
4132 case Primitive::kPrimBoolean:
4133 case Primitive::kPrimByte:
4134 case Primitive::kPrimShort:
4135 case Primitive::kPrimChar:
4136 case Primitive::kPrimInt: {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004137 vixl32::Register length;
4138 if (maybe_compressed_char_at) {
4139 length = RegisterFrom(locations->GetTemp(0));
4140 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4141 GetAssembler()->LoadFromOffset(kLoadWord, length, obj, count_offset);
4142 codegen_->MaybeRecordImplicitNullCheck(instruction);
4143 }
Scott Wakelingc34dba72016-10-03 10:14:44 +01004144 if (index.IsConstant()) {
4145 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
4146 if (maybe_compressed_char_at) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004147 vixl32::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004148 __ Lsrs(length, length, 1u); // LSRS has a 16-bit encoding, TST (immediate) does not.
4149 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4150 "Expecting 0=compressed, 1=uncompressed");
4151 __ B(cs, &uncompressed_load);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004152 GetAssembler()->LoadFromOffset(kLoadUnsignedByte,
4153 RegisterFrom(out_loc),
4154 obj,
4155 data_offset + const_index);
4156 __ B(&done);
4157 __ Bind(&uncompressed_load);
4158 GetAssembler()->LoadFromOffset(GetLoadOperandType(Primitive::kPrimChar),
4159 RegisterFrom(out_loc),
4160 obj,
4161 data_offset + (const_index << 1));
4162 __ Bind(&done);
Scott Wakelingc34dba72016-10-03 10:14:44 +01004163 } else {
4164 uint32_t full_offset = data_offset + (const_index << Primitive::ComponentSizeShift(type));
4165
4166 LoadOperandType load_type = GetLoadOperandType(type);
4167 GetAssembler()->LoadFromOffset(load_type, RegisterFrom(out_loc), obj, full_offset);
4168 }
4169 } else {
4170 vixl32::Register temp = temps.Acquire();
4171
4172 if (has_intermediate_address) {
Artem Serov2bbc9532016-10-21 11:51:50 +01004173 // We do not need to compute the intermediate address from the array: the
4174 // input instruction has done it already. See the comment in
4175 // `TryExtractArrayAccessAddress()`.
4176 if (kIsDebugBuild) {
4177 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
4178 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
4179 }
4180 temp = obj;
Scott Wakelingc34dba72016-10-03 10:14:44 +01004181 } else {
4182 __ Add(temp, obj, data_offset);
4183 }
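        // At this point `temp` holds the address of the first array element; the element itself
        // is accessed below with a scaled register offset on `index`.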
4184 if (maybe_compressed_char_at) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004185 vixl32::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004186 __ Lsrs(length, length, 1u); // LSRS has a 16-bit encoding, TST (immediate) does not.
4187 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4188 "Expecting 0=compressed, 1=uncompressed");
4189 __ B(cs, &uncompressed_load);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004190 __ Ldrb(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 0));
4191 __ B(&done);
4192 __ Bind(&uncompressed_load);
4193 __ Ldrh(RegisterFrom(out_loc), MemOperand(temp, RegisterFrom(index), vixl32::LSL, 1));
4194 __ Bind(&done);
Scott Wakelingc34dba72016-10-03 10:14:44 +01004195 } else {
4196 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
4197 }
4198 }
4199 break;
4200 }
4201
4202 case Primitive::kPrimNot: {
4203 static_assert(
4204 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4205 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4206 // /* HeapReference<Object> */ out =
4207 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4208 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4209 TODO_VIXL32(FATAL);
4210 } else {
4211 vixl32::Register out = OutputRegister(instruction);
4212 if (index.IsConstant()) {
4213 size_t offset =
4214 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4215 GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
4216 codegen_->MaybeRecordImplicitNullCheck(instruction);
4217 // If read barriers are enabled, emit read barriers other than
4218 // Baker's using a slow path (and also unpoison the loaded
4219 // reference, if heap poisoning is enabled).
4220 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4221 } else {
4222 vixl32::Register temp = temps.Acquire();
4223
4224 if (has_intermediate_address) {
Artem Serov2bbc9532016-10-21 11:51:50 +01004225 // We do not need to compute the intermediate address from the array: the
4226 // input instruction has done it already. See the comment in
4227 // `TryExtractArrayAccessAddress()`.
4228 if (kIsDebugBuild) {
4229 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
4230 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
4231 }
4232 temp = obj;
Scott Wakelingc34dba72016-10-03 10:14:44 +01004233 } else {
4234 __ Add(temp, obj, data_offset);
4235 }
4236 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
4237
4238 codegen_->MaybeRecordImplicitNullCheck(instruction);
4239 // If read barriers are enabled, emit read barriers other than
4240 // Baker's using a slow path (and also unpoison the loaded
4241 // reference, if heap poisoning is enabled).
4242 codegen_->MaybeGenerateReadBarrierSlow(
4243 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4244 }
4245 }
4246 break;
4247 }
4248
4249 case Primitive::kPrimLong: {
4250 if (index.IsConstant()) {
4251 size_t offset =
4252 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4253 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), obj, offset);
4254 } else {
4255 vixl32::Register temp = temps.Acquire();
4256 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4257 GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), temp, data_offset);
4258 }
4259 break;
4260 }
4261
4262 case Primitive::kPrimFloat: {
4263 vixl32::SRegister out = SRegisterFrom(out_loc);
4264 if (index.IsConstant()) {
4265 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4266 GetAssembler()->LoadSFromOffset(out, obj, offset);
4267 } else {
4268 vixl32::Register temp = temps.Acquire();
4269 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
4270 GetAssembler()->LoadSFromOffset(out, temp, data_offset);
4271 }
4272 break;
4273 }
4274
4275 case Primitive::kPrimDouble: {
4276 if (index.IsConstant()) {
4277 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4278 GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), obj, offset);
4279 } else {
4280 vixl32::Register temp = temps.Acquire();
4281 __ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4282 GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), temp, data_offset);
4283 }
4284 break;
4285 }
4286
4287 case Primitive::kPrimVoid:
4288 LOG(FATAL) << "Unreachable type " << type;
4289 UNREACHABLE();
4290 }
4291
4292 if (type == Primitive::kPrimNot) {
4293 // Potential implicit null checks, in the case of reference
4294 // arrays, are handled in the previous switch statement.
4295 } else if (!maybe_compressed_char_at) {
4296 codegen_->MaybeRecordImplicitNullCheck(instruction);
4297 }
4298}
4299
4300void LocationsBuilderARMVIXL::VisitArraySet(HArraySet* instruction) {
4301 Primitive::Type value_type = instruction->GetComponentType();
4302
4303 bool needs_write_barrier =
4304 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
4305 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
4306
4307 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
4308 instruction,
4309 may_need_runtime_call_for_type_check ?
4310 LocationSummary::kCallOnSlowPath :
4311 LocationSummary::kNoCall);
4312
4313 locations->SetInAt(0, Location::RequiresRegister());
4314 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4315 if (Primitive::IsFloatingPointType(value_type)) {
4316 locations->SetInAt(2, Location::RequiresFpuRegister());
4317 } else {
4318 locations->SetInAt(2, Location::RequiresRegister());
4319 }
4320 if (needs_write_barrier) {
4321 // Temporary registers for the write barrier.
4322 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
4323 locations->AddTemp(Location::RequiresRegister());
4324 }
4325}
4326
4327void InstructionCodeGeneratorARMVIXL::VisitArraySet(HArraySet* instruction) {
4328 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4329 LocationSummary* locations = instruction->GetLocations();
4330 vixl32::Register array = InputRegisterAt(instruction, 0);
4331 Location index = locations->InAt(1);
4332 Primitive::Type value_type = instruction->GetComponentType();
4333 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
4334 bool needs_write_barrier =
4335 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
4336 uint32_t data_offset =
4337 mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
4338 Location value_loc = locations->InAt(2);
4339 HInstruction* array_instr = instruction->GetArray();
4340 bool has_intermediate_address = array_instr->IsIntermediateAddress();
4341 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
4342 DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));
4343
4344 switch (value_type) {
4345 case Primitive::kPrimBoolean:
4346 case Primitive::kPrimByte:
4347 case Primitive::kPrimShort:
4348 case Primitive::kPrimChar:
4349 case Primitive::kPrimInt: {
4350 if (index.IsConstant()) {
4351 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
4352 uint32_t full_offset =
4353 data_offset + (const_index << Primitive::ComponentSizeShift(value_type));
4354 StoreOperandType store_type = GetStoreOperandType(value_type);
4355 GetAssembler()->StoreToOffset(store_type, RegisterFrom(value_loc), array, full_offset);
4356 } else {
4357 vixl32::Register temp = temps.Acquire();
4358
4359 if (has_intermediate_address) {
Artem Serov2bbc9532016-10-21 11:51:50 +01004360 // We do not need to compute the intermediate address from the array: the
4361 // input instruction has done it already. See the comment in
4362 // `TryExtractArrayAccessAddress()`.
4363 if (kIsDebugBuild) {
4364 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
4365          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
4366 }
4367 temp = array;
Scott Wakelingc34dba72016-10-03 10:14:44 +01004368 } else {
4369 __ Add(temp, array, data_offset);
4370 }
4371 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
4372 }
4373 break;
4374 }
4375
4376 case Primitive::kPrimNot: {
4377 vixl32::Register value = RegisterFrom(value_loc);
4378 // TryExtractArrayAccessAddress optimization is never applied for non-primitive ArraySet.
4379 // See the comment in instruction_simplifier_shared.cc.
4380 DCHECK(!has_intermediate_address);
4381
4382 if (instruction->InputAt(2)->IsNullConstant()) {
4383 // Just setting null.
4384 if (index.IsConstant()) {
4385 size_t offset =
4386 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4387 GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
4388 } else {
4389 DCHECK(index.IsRegister()) << index;
4390 vixl32::Register temp = temps.Acquire();
4391 __ Add(temp, array, data_offset);
4392 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
4393 }
4394 codegen_->MaybeRecordImplicitNullCheck(instruction);
4395 DCHECK(!needs_write_barrier);
4396 DCHECK(!may_need_runtime_call_for_type_check);
4397 break;
4398 }
4399
4400 DCHECK(needs_write_barrier);
4401 Location temp1_loc = locations->GetTemp(0);
4402 vixl32::Register temp1 = RegisterFrom(temp1_loc);
4403 Location temp2_loc = locations->GetTemp(1);
4404 vixl32::Register temp2 = RegisterFrom(temp2_loc);
4405 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4406 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4407 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
4408 vixl32::Label done;
4409 SlowPathCodeARMVIXL* slow_path = nullptr;
4410
4411 if (may_need_runtime_call_for_type_check) {
4412 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARMVIXL(instruction);
4413 codegen_->AddSlowPath(slow_path);
4414 if (instruction->GetValueCanBeNull()) {
4415 vixl32::Label non_zero;
4416 __ Cbnz(value, &non_zero);
4417 if (index.IsConstant()) {
4418 size_t offset =
4419 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4420 GetAssembler()->StoreToOffset(kStoreWord, value, array, offset);
4421 } else {
4422 DCHECK(index.IsRegister()) << index;
4423 vixl32::Register temp = temps.Acquire();
4424 __ Add(temp, array, data_offset);
4425 codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
4426 }
4427 codegen_->MaybeRecordImplicitNullCheck(instruction);
4428 __ B(&done);
4429 __ Bind(&non_zero);
4430 }
4431
4432 // Note that when read barriers are enabled, the type checks
4433 // are performed without read barriers. This is fine, even in
4434 // the case where a class object is in the from-space after
4435 // the flip, as a comparison involving such a type would not
4436 // produce a false positive; it may of course produce a false
4437 // negative, in which case we would take the ArraySet slow
4438 // path.
4439
4440 // /* HeapReference<Class> */ temp1 = array->klass_
4441 GetAssembler()->LoadFromOffset(kLoadWord, temp1, array, class_offset);
4442 codegen_->MaybeRecordImplicitNullCheck(instruction);
4443 GetAssembler()->MaybeUnpoisonHeapReference(temp1);
4444
4445 // /* HeapReference<Class> */ temp1 = temp1->component_type_
4446 GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
4447 // /* HeapReference<Class> */ temp2 = value->klass_
4448 GetAssembler()->LoadFromOffset(kLoadWord, temp2, value, class_offset);
4449 // If heap poisoning is enabled, no need to unpoison `temp1`
4450 // nor `temp2`, as we are comparing two poisoned references.
4451 __ Cmp(temp1, temp2);
4452
4453 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4454 vixl32::Label do_put;
4455 __ B(eq, &do_put);
4456 // If heap poisoning is enabled, the `temp1` reference has
4457 // not been unpoisoned yet; unpoison it now.
4458 GetAssembler()->MaybeUnpoisonHeapReference(temp1);
4459
4460 // /* HeapReference<Class> */ temp1 = temp1->super_class_
4461 GetAssembler()->LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
4462 // If heap poisoning is enabled, no need to unpoison
4463 // `temp1`, as we are comparing against null below.
4464 __ Cbnz(temp1, slow_path->GetEntryLabel());
4465 __ Bind(&do_put);
4466 } else {
4467 __ B(ne, slow_path->GetEntryLabel());
4468 }
4469 }
4470
4471 vixl32::Register source = value;
4472 if (kPoisonHeapReferences) {
4473 // Note that in the case where `value` is a null reference,
4474 // we do not enter this block, as a null reference does not
4475 // need poisoning.
4476 DCHECK_EQ(value_type, Primitive::kPrimNot);
4477 __ Mov(temp1, value);
4478 GetAssembler()->PoisonHeapReference(temp1);
4479 source = temp1;
4480 }
4481
4482 if (index.IsConstant()) {
4483 size_t offset =
4484 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4485 GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
4486 } else {
4487 DCHECK(index.IsRegister()) << index;
4488
4489 vixl32::Register temp = temps.Acquire();
4490 __ Add(temp, array, data_offset);
4491 codegen_->StoreToShiftedRegOffset(value_type,
4492 LocationFrom(source),
4493 temp,
4494 RegisterFrom(index));
4495 }
4496
4497 if (!may_need_runtime_call_for_type_check) {
4498 codegen_->MaybeRecordImplicitNullCheck(instruction);
4499 }
4500
4501 codegen_->MarkGCCard(temp1, temp2, array, value, instruction->GetValueCanBeNull());
4502
4503 if (done.IsReferenced()) {
4504 __ Bind(&done);
4505 }
4506
4507 if (slow_path != nullptr) {
4508 __ Bind(slow_path->GetExitLabel());
4509 }
4510
4511 break;
4512 }
4513
4514 case Primitive::kPrimLong: {
4515 Location value = locations->InAt(2);
4516 if (index.IsConstant()) {
4517 size_t offset =
4518 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4519 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), array, offset);
4520 } else {
4521 vixl32::Register temp = temps.Acquire();
4522 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4523 GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), temp, data_offset);
4524 }
4525 break;
4526 }
4527
4528 case Primitive::kPrimFloat: {
4529 Location value = locations->InAt(2);
4530 DCHECK(value.IsFpuRegister());
4531 if (index.IsConstant()) {
4532 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4533 GetAssembler()->StoreSToOffset(SRegisterFrom(value), array, offset);
4534 } else {
4535 vixl32::Register temp = temps.Acquire();
4536 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
4537 GetAssembler()->StoreSToOffset(SRegisterFrom(value), temp, data_offset);
4538 }
4539 break;
4540 }
4541
4542 case Primitive::kPrimDouble: {
4543 Location value = locations->InAt(2);
4544 DCHECK(value.IsFpuRegisterPair());
4545 if (index.IsConstant()) {
4546 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
4547 GetAssembler()->StoreDToOffset(DRegisterFrom(value), array, offset);
4548 } else {
4549 vixl32::Register temp = temps.Acquire();
4550 __ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
4551 GetAssembler()->StoreDToOffset(DRegisterFrom(value), temp, data_offset);
4552 }
4553 break;
4554 }
4555
4556 case Primitive::kPrimVoid:
4557 LOG(FATAL) << "Unreachable type " << value_type;
4558 UNREACHABLE();
4559 }
4560
4561 // Objects are handled in the switch.
4562 if (value_type != Primitive::kPrimNot) {
4563 codegen_->MaybeRecordImplicitNullCheck(instruction);
4564 }
4565}
4566
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004567void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {
4568 LocationSummary* locations =
4569 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4570 locations->SetInAt(0, Location::RequiresRegister());
4571 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4572}
4573
4574void InstructionCodeGeneratorARMVIXL::VisitArrayLength(HArrayLength* instruction) {
4575 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
4576 vixl32::Register obj = InputRegisterAt(instruction, 0);
4577 vixl32::Register out = OutputRegister(instruction);
4578 GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
4579 codegen_->MaybeRecordImplicitNullCheck(instruction);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004580 // Mask out compression flag from String's array length.
4581 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004582 __ Lsr(out, out, 1u);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004583 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004584}
4585
Artem Serov2bbc9532016-10-21 11:51:50 +01004586void LocationsBuilderARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
4587 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
4588 DCHECK(!kEmitCompilerReadBarrier);
4589 LocationSummary* locations =
4590 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4591
4592 locations->SetInAt(0, Location::RequiresRegister());
4593 locations->SetInAt(1, Location::RegisterOrConstant(instruction->GetOffset()));
4594 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4595}
4596
4597void InstructionCodeGeneratorARMVIXL::VisitIntermediateAddress(HIntermediateAddress* instruction) {
4598 vixl32::Register out = OutputRegister(instruction);
4599 vixl32::Register first = InputRegisterAt(instruction, 0);
4600 Location second = instruction->GetLocations()->InAt(1);
4601
4602 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
4603 DCHECK(!kEmitCompilerReadBarrier);
4604
4605 if (second.IsRegister()) {
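  // The result is simply `first + second`: the array base plus the offset (typically the
  // constant data offset hoisted by `TryExtractArrayAccessAddress()`), so the array accesses
  // consuming it only need to apply the scaled index.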
4606 __ Add(out, first, RegisterFrom(second));
4607 } else {
4608 __ Add(out, first, second.GetConstant()->AsIntConstant()->GetValue());
4609 }
4610}
4611
Scott Wakelingc34dba72016-10-03 10:14:44 +01004612void LocationsBuilderARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
4613 RegisterSet caller_saves = RegisterSet::Empty();
4614 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4615 caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
4616 caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(1)));
4617 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
4618 locations->SetInAt(0, Location::RequiresRegister());
4619 locations->SetInAt(1, Location::RequiresRegister());
4620}
4621
4622void InstructionCodeGeneratorARMVIXL::VisitBoundsCheck(HBoundsCheck* instruction) {
4623 SlowPathCodeARMVIXL* slow_path =
4624 new (GetGraph()->GetArena()) BoundsCheckSlowPathARMVIXL(instruction);
4625 codegen_->AddSlowPath(slow_path);
4626
4627 vixl32::Register index = InputRegisterAt(instruction, 0);
4628 vixl32::Register length = InputRegisterAt(instruction, 1);
4629
4630 __ Cmp(index, length);
4631 __ B(hs, slow_path->GetEntryLabel());
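  // Note: the unsigned `hs` comparison also catches negative indices, which wrap around to
  // large unsigned values and therefore take the slow path as well.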
4632}
4633
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004634void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp,
4635 vixl32::Register card,
4636 vixl32::Register object,
4637 vixl32::Register value,
4638 bool can_be_null) {
4639 vixl32::Label is_null;
4640 if (can_be_null) {
4641 __ Cbz(value, &is_null);
4642 }
4643 GetAssembler()->LoadFromOffset(
4644 kLoadWord, card, tr, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
4645 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
4646 __ Strb(card, MemOperand(card, temp));
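  // Note: `Strb` stores the low byte of the card-table base register itself; the card table
  // is assumed to be set up so that this value equals the dirty-card marker, which avoids
  // materializing a separate constant here.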
4647 if (can_be_null) {
4648 __ Bind(&is_null);
4649 }
4650}
4651
Scott Wakelingfe885462016-09-22 10:24:38 +01004652void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4653 LOG(FATAL) << "Unreachable";
4654}
4655
4656void InstructionCodeGeneratorARMVIXL::VisitParallelMove(HParallelMove* instruction) {
4657 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4658}
4659
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004660void LocationsBuilderARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
4661 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4662 // TODO(VIXL): https://android-review.googlesource.com/#/c/275337/ and related.
4663}
4664
4665void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instruction) {
4666 HBasicBlock* block = instruction->GetBlock();
4667 if (block->GetLoopInformation() != nullptr) {
4668 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4669 // The back edge will generate the suspend check.
4670 return;
4671 }
4672 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4673 // The goto will generate the suspend check.
4674 return;
4675 }
4676 GenerateSuspendCheck(instruction, nullptr);
4677}
4678
4679void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
4680 HBasicBlock* successor) {
4681 SuspendCheckSlowPathARMVIXL* slow_path =
4682 down_cast<SuspendCheckSlowPathARMVIXL*>(instruction->GetSlowPath());
4683 if (slow_path == nullptr) {
4684 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARMVIXL(instruction, successor);
4685 instruction->SetSlowPath(slow_path);
4686 codegen_->AddSlowPath(slow_path);
4687 if (successor != nullptr) {
4688 DCHECK(successor->IsLoopHeader());
4689 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
4690 }
4691 } else {
4692 DCHECK_EQ(slow_path->GetSuccessor(), successor);
4693 }
4694
4695 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4696 vixl32::Register temp = temps.Acquire();
4697 GetAssembler()->LoadFromOffset(
4698 kLoadUnsignedHalfword, temp, tr, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
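  // The 16-bit thread flags are non-zero when a suspend or checkpoint request is pending, so
  // the checks below either branch to the slow path or fall through to the successor.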
4699 if (successor == nullptr) {
4700 __ Cbnz(temp, slow_path->GetEntryLabel());
4701 __ Bind(slow_path->GetReturnLabel());
4702 } else {
4703 __ Cbz(temp, codegen_->GetLabelOf(successor));
4704 __ B(slow_path->GetEntryLabel());
4705 }
4706}
4707
Scott Wakelingfe885462016-09-22 10:24:38 +01004708ArmVIXLAssembler* ParallelMoveResolverARMVIXL::GetAssembler() const {
4709 return codegen_->GetAssembler();
4710}
4711
4712void ParallelMoveResolverARMVIXL::EmitMove(size_t index) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004713 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
Scott Wakelingfe885462016-09-22 10:24:38 +01004714 MoveOperands* move = moves_[index];
4715 Location source = move->GetSource();
4716 Location destination = move->GetDestination();
4717
4718 if (source.IsRegister()) {
4719 if (destination.IsRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004720 __ Mov(RegisterFrom(destination), RegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004721 } else if (destination.IsFpuRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004722 __ Vmov(SRegisterFrom(destination), RegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004723 } else {
4724 DCHECK(destination.IsStackSlot());
4725 GetAssembler()->StoreToOffset(kStoreWord,
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004726 RegisterFrom(source),
Scott Wakelingfe885462016-09-22 10:24:38 +01004727 sp,
4728 destination.GetStackIndex());
4729 }
4730 } else if (source.IsStackSlot()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004731 if (destination.IsRegister()) {
4732 GetAssembler()->LoadFromOffset(kLoadWord,
4733 RegisterFrom(destination),
4734 sp,
4735 source.GetStackIndex());
4736 } else if (destination.IsFpuRegister()) {
4737 GetAssembler()->LoadSFromOffset(SRegisterFrom(destination), sp, source.GetStackIndex());
4738 } else {
4739 DCHECK(destination.IsStackSlot());
4740 vixl32::Register temp = temps.Acquire();
4741 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, source.GetStackIndex());
4742 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4743 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004744 } else if (source.IsFpuRegister()) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004745 if (destination.IsRegister()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004746 __ Vmov(RegisterFrom(destination), SRegisterFrom(source));
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004747 } else if (destination.IsFpuRegister()) {
4748 __ Vmov(SRegisterFrom(destination), SRegisterFrom(source));
4749 } else {
4750 DCHECK(destination.IsStackSlot());
4751 GetAssembler()->StoreSToOffset(SRegisterFrom(source), sp, destination.GetStackIndex());
4752 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004753 } else if (source.IsDoubleStackSlot()) {
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004754 if (destination.IsDoubleStackSlot()) {
4755 vixl32::DRegister temp = temps.AcquireD();
4756 GetAssembler()->LoadDFromOffset(temp, sp, source.GetStackIndex());
4757 GetAssembler()->StoreDToOffset(temp, sp, destination.GetStackIndex());
4758 } else if (destination.IsRegisterPair()) {
4759 DCHECK(ExpectedPairLayout(destination));
4760 GetAssembler()->LoadFromOffset(
4761 kLoadWordPair, LowRegisterFrom(destination), sp, source.GetStackIndex());
4762 } else {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004763 DCHECK(destination.IsFpuRegisterPair()) << destination;
4764 GetAssembler()->LoadDFromOffset(DRegisterFrom(destination), sp, source.GetStackIndex());
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004765 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004766 } else if (source.IsRegisterPair()) {
4767 if (destination.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004768 __ Mov(LowRegisterFrom(destination), LowRegisterFrom(source));
4769 __ Mov(HighRegisterFrom(destination), HighRegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004770 } else if (destination.IsFpuRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004771 __ Vmov(DRegisterFrom(destination), LowRegisterFrom(source), HighRegisterFrom(source));
Scott Wakelingfe885462016-09-22 10:24:38 +01004772 } else {
4773 DCHECK(destination.IsDoubleStackSlot()) << destination;
4774 DCHECK(ExpectedPairLayout(source));
4775 GetAssembler()->StoreToOffset(kStoreWordPair,
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004776 LowRegisterFrom(source),
Scott Wakelingfe885462016-09-22 10:24:38 +01004777 sp,
4778 destination.GetStackIndex());
4779 }
4780 } else if (source.IsFpuRegisterPair()) {
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004781 if (destination.IsRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004782 __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), DRegisterFrom(source));
Alexandre Ramesb45fbaa52016-10-17 14:57:13 +01004783 } else if (destination.IsFpuRegisterPair()) {
4784 __ Vmov(DRegisterFrom(destination), DRegisterFrom(source));
4785 } else {
4786 DCHECK(destination.IsDoubleStackSlot()) << destination;
4787 GetAssembler()->StoreDToOffset(DRegisterFrom(source), sp, destination.GetStackIndex());
4788 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004789 } else {
4790 DCHECK(source.IsConstant()) << source;
4791 HConstant* constant = source.GetConstant();
4792 if (constant->IsIntConstant() || constant->IsNullConstant()) {
4793 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
4794 if (destination.IsRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004795 __ Mov(RegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004796 } else {
4797 DCHECK(destination.IsStackSlot());
Scott Wakelingfe885462016-09-22 10:24:38 +01004798 vixl32::Register temp = temps.Acquire();
4799 __ Mov(temp, value);
4800 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4801 }
4802 } else if (constant->IsLongConstant()) {
4803 int64_t value = constant->AsLongConstant()->GetValue();
4804 if (destination.IsRegisterPair()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004805 __ Mov(LowRegisterFrom(destination), Low32Bits(value));
4806 __ Mov(HighRegisterFrom(destination), High32Bits(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004807 } else {
4808 DCHECK(destination.IsDoubleStackSlot()) << destination;
Scott Wakelingfe885462016-09-22 10:24:38 +01004809 vixl32::Register temp = temps.Acquire();
4810 __ Mov(temp, Low32Bits(value));
4811 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4812 __ Mov(temp, High32Bits(value));
4813 GetAssembler()->StoreToOffset(kStoreWord,
4814 temp,
4815 sp,
4816 destination.GetHighStackIndex(kArmWordSize));
4817 }
4818 } else if (constant->IsDoubleConstant()) {
4819 double value = constant->AsDoubleConstant()->GetValue();
4820 if (destination.IsFpuRegisterPair()) {
Scott Wakelingc34dba72016-10-03 10:14:44 +01004821 __ Vmov(DRegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004822 } else {
4823 DCHECK(destination.IsDoubleStackSlot()) << destination;
4824 uint64_t int_value = bit_cast<uint64_t, double>(value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004825 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004826 __ Mov(temp, Low32Bits(int_value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004827 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004828 __ Mov(temp, High32Bits(int_value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004829 GetAssembler()->StoreToOffset(kStoreWord,
4830 temp,
4831 sp,
4832 destination.GetHighStackIndex(kArmWordSize));
4833 }
4834 } else {
4835 DCHECK(constant->IsFloatConstant()) << constant->DebugName();
4836 float value = constant->AsFloatConstant()->GetValue();
4837 if (destination.IsFpuRegister()) {
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004838 __ Vmov(SRegisterFrom(destination), value);
Scott Wakelingfe885462016-09-22 10:24:38 +01004839 } else {
4840 DCHECK(destination.IsStackSlot());
Scott Wakelingfe885462016-09-22 10:24:38 +01004841 vixl32::Register temp = temps.Acquire();
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004842 __ Mov(temp, bit_cast<int32_t, float>(value));
Scott Wakelingfe885462016-09-22 10:24:38 +01004843 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, destination.GetStackIndex());
4844 }
4845 }
4846 }
4847}
4848
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004849void ParallelMoveResolverARMVIXL::Exchange(vixl32::Register reg, int mem) {
4850 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4851 vixl32::Register temp = temps.Acquire();
4852 __ Mov(temp, reg);
4853 GetAssembler()->LoadFromOffset(kLoadWord, reg, sp, mem);
4854 GetAssembler()->StoreToOffset(kStoreWord, temp, sp, mem);
Scott Wakelingfe885462016-09-22 10:24:38 +01004855}
4856
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004857void ParallelMoveResolverARMVIXL::Exchange(int mem1, int mem2) {
4858 // TODO(VIXL32): Double check the performance of this implementation.
4859 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4860 vixl32::Register temp = temps.Acquire();
4861 vixl32::SRegister temp_s = temps.AcquireS();
4862
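  // Using one core register and one S register as scratch lets us swap the two stack slots
  // without needing a second core scratch register.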
4863 __ Ldr(temp, MemOperand(sp, mem1));
4864 __ Vldr(temp_s, MemOperand(sp, mem2));
4865 __ Str(temp, MemOperand(sp, mem2));
4866 __ Vstr(temp_s, MemOperand(sp, mem1));
Scott Wakelingfe885462016-09-22 10:24:38 +01004867}
4868
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004869void ParallelMoveResolverARMVIXL::EmitSwap(size_t index) {
4870 MoveOperands* move = moves_[index];
4871 Location source = move->GetSource();
4872 Location destination = move->GetDestination();
4873 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
4874
4875 if (source.IsRegister() && destination.IsRegister()) {
4876 vixl32::Register temp = temps.Acquire();
4877 DCHECK(!RegisterFrom(source).Is(temp));
4878 DCHECK(!RegisterFrom(destination).Is(temp));
4879 __ Mov(temp, RegisterFrom(destination));
4880 __ Mov(RegisterFrom(destination), RegisterFrom(source));
4881 __ Mov(RegisterFrom(source), temp);
4882 } else if (source.IsRegister() && destination.IsStackSlot()) {
4883 Exchange(RegisterFrom(source), destination.GetStackIndex());
4884 } else if (source.IsStackSlot() && destination.IsRegister()) {
4885 Exchange(RegisterFrom(destination), source.GetStackIndex());
4886 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
4887 TODO_VIXL32(FATAL);
4888 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
4889 TODO_VIXL32(FATAL);
4890 } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
4891 vixl32::DRegister temp = temps.AcquireD();
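    // A D register holds both halves of a core register pair, so it serves as the temporary
    // for swapping the two pairs.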
4892 __ Vmov(temp, LowRegisterFrom(source), HighRegisterFrom(source));
4893 __ Mov(LowRegisterFrom(source), LowRegisterFrom(destination));
4894 __ Mov(HighRegisterFrom(source), HighRegisterFrom(destination));
4895 __ Vmov(LowRegisterFrom(destination), HighRegisterFrom(destination), temp);
4896 } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
4897 vixl32::Register low_reg = LowRegisterFrom(source.IsRegisterPair() ? source : destination);
4898 int mem = source.IsRegisterPair() ? destination.GetStackIndex() : source.GetStackIndex();
4899 DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
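    // Park the register pair in a D register, load the pair from the stack slot, then store
    // the parked pair back to the same slot.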
4900 vixl32::DRegister temp = temps.AcquireD();
4901 __ Vmov(temp, low_reg, vixl32::Register(low_reg.GetCode() + 1));
4902 GetAssembler()->LoadFromOffset(kLoadWordPair, low_reg, sp, mem);
4903 GetAssembler()->StoreDToOffset(temp, sp, mem);
4904 } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01004905 vixl32::DRegister first = DRegisterFrom(source);
4906 vixl32::DRegister second = DRegisterFrom(destination);
4907 vixl32::DRegister temp = temps.AcquireD();
4908 __ Vmov(temp, first);
4909 __ Vmov(first, second);
4910 __ Vmov(second, temp);
Alexandre Rames9c19bd62016-10-24 11:50:32 +01004911 } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
4912 TODO_VIXL32(FATAL);
4913 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
4914 TODO_VIXL32(FATAL);
4915 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
4916 vixl32::DRegister temp1 = temps.AcquireD();
4917 vixl32::DRegister temp2 = temps.AcquireD();
4918 __ Vldr(temp1, MemOperand(sp, source.GetStackIndex()));
4919 __ Vldr(temp2, MemOperand(sp, destination.GetStackIndex()));
4920 __ Vstr(temp1, MemOperand(sp, destination.GetStackIndex()));
4921 __ Vstr(temp2, MemOperand(sp, source.GetStackIndex()));
4922 } else {
4923 LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
4924 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004925}
4926
4927void ParallelMoveResolverARMVIXL::SpillScratch(int reg ATTRIBUTE_UNUSED) {
4928 TODO_VIXL32(FATAL);
4929}
4930
4931void ParallelMoveResolverARMVIXL::RestoreScratch(int reg ATTRIBUTE_UNUSED) {
4932 TODO_VIXL32(FATAL);
4933}
4934
Artem Serov02d37832016-10-25 15:25:33 +01004935// Check if the desired_class_load_kind is supported. If it is, return it,
4936// otherwise return a fall-back kind that should be used instead.
4937HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
4938 HLoadClass::LoadKind desired_class_load_kind ATTRIBUTE_UNUSED) {
4939 // TODO(VIXL): Implement optimized code paths.
4940 return HLoadClass::LoadKind::kDexCacheViaMethod;
4941}
4942
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004943void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
4944 if (cls->NeedsAccessCheck()) {
4945 InvokeRuntimeCallingConventionARMVIXL calling_convention;
4946 CodeGenerator::CreateLoadClassLocationSummary(
4947 cls,
4948 LocationFrom(calling_convention.GetRegisterAt(0)),
4949 LocationFrom(r0),
4950 /* code_generator_supports_read_barrier */ true);
4951 return;
4952 }
Scott Wakelingfe885462016-09-22 10:24:38 +01004953
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004954 // TODO(VIXL): read barrier code.
4955 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
4956 ? LocationSummary::kCallOnSlowPath
4957 : LocationSummary::kNoCall;
4958 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
4959 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4960 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
4961 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
4962 load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
4963 locations->SetInAt(0, Location::RequiresRegister());
4964 }
4965 locations->SetOut(Location::RequiresRegister());
4966}
4967
4968void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
4969 LocationSummary* locations = cls->GetLocations();
4970 if (cls->NeedsAccessCheck()) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004971 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004972 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
4973 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
4974 return;
4975 }
4976
4977 Location out_loc = locations->Out();
4978 vixl32::Register out = OutputRegister(cls);
4979
4980 // TODO(VIXL): read barrier code.
4981 bool generate_null_check = false;
4982 switch (cls->GetLoadKind()) {
4983 case HLoadClass::LoadKind::kReferrersClass: {
4984 DCHECK(!cls->CanCallRuntime());
4985 DCHECK(!cls->MustGenerateClinitCheck());
4986 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4987 vixl32::Register current_method = InputRegisterAt(cls, 0);
4988 GenerateGcRootFieldLoad(cls,
4989 out_loc,
4990 current_method,
Roland Levillain00468f32016-10-27 18:02:48 +01004991 ArtMethod::DeclaringClassOffset().Int32Value(),
4992 kEmitCompilerReadBarrier);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01004993 break;
4994 }
4995 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4996 // /* GcRoot<mirror::Class>[] */ out =
4997 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4998 vixl32::Register current_method = InputRegisterAt(cls, 0);
4999 const int32_t resolved_types_offset =
5000 ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
5001 GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
5002 // /* GcRoot<mirror::Class> */ out = out[type_index]
Andreas Gampea5b09a62016-11-17 15:21:22 -08005003 size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
Roland Levillain00468f32016-10-27 18:02:48 +01005004 GenerateGcRootFieldLoad(cls, out_loc, out, offset, kEmitCompilerReadBarrier);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005005 generate_null_check = !cls->IsInDexCache();
5006 break;
5007 }
5008 default:
5009 TODO_VIXL32(FATAL);
5010 }
5011
5012 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5013 DCHECK(cls->CanCallRuntime());
5014 LoadClassSlowPathARMVIXL* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(
5015 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5016 codegen_->AddSlowPath(slow_path);
5017 if (generate_null_check) {
5018 __ Cbz(out, slow_path->GetEntryLabel());
5019 }
5020 if (cls->MustGenerateClinitCheck()) {
5021 GenerateClassInitializationCheck(slow_path, out);
5022 } else {
5023 __ Bind(slow_path->GetExitLabel());
5024 }
5025 }
5026}
5027
Artem Serov02d37832016-10-25 15:25:33 +01005028void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
5029 LocationSummary* locations =
5030 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5031 locations->SetInAt(0, Location::RequiresRegister());
5032 if (check->HasUses()) {
5033 locations->SetOut(Location::SameAsFirstInput());
5034 }
5035}
5036
5037void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
5038 // We assume the class is not null.
5039 LoadClassSlowPathARMVIXL* slow_path =
5040 new (GetGraph()->GetArena()) LoadClassSlowPathARMVIXL(check->GetLoadClass(),
5041 check,
5042 check->GetDexPc(),
5043 /* do_clinit */ true);
5044 codegen_->AddSlowPath(slow_path);
5045 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
5046}
5047
5048void InstructionCodeGeneratorARMVIXL::GenerateClassInitializationCheck(
5049 LoadClassSlowPathARMVIXL* slow_path, vixl32::Register class_reg) {
5050 UseScratchRegisterScope temps(GetVIXLAssembler());
5051 vixl32::Register temp = temps.Acquire();
5052 GetAssembler()->LoadFromOffset(kLoadWord,
5053 temp,
5054 class_reg,
5055 mirror::Class::StatusOffset().Int32Value());
5056 __ Cmp(temp, mirror::Class::kStatusInitialized);
5057 __ B(lt, slow_path->GetEntryLabel());
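  // Any status below kStatusInitialized (e.g. a class still being initialized by another
  // thread) takes the slow path.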
5058 // Even if the initialized flag is set, we may be in a situation where caches are not synced
5059 // properly. Therefore, we do a memory fence.
5060 __ Dmb(ISH);
5061 __ Bind(slow_path->GetExitLabel());
5062}
5063
5064// Check if the desired_string_load_kind is supported. If it is, return it,
5065// otherwise return a fall-back kind that should be used instead.
5066HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
5067 HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
5068 // TODO(VIXL): Implement optimized code paths. For now we always use the simpler fallback code.
5069 return HLoadString::LoadKind::kDexCacheViaMethod;
5070}
5071
5072void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
5073 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
5074 ? LocationSummary::kCallOnMainOnly
5075 : LocationSummary::kNoCall;
5076 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
5077
5078 // TODO(VIXL): Implement optimized code paths.
5079 // See InstructionCodeGeneratorARMVIXL::VisitLoadString.
5080 HLoadString::LoadKind load_kind = load->GetLoadKind();
5081 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
5082 locations->SetInAt(0, Location::RequiresRegister());
5083 // TODO(VIXL): Use InvokeRuntimeCallingConventionARMVIXL instead.
5084 locations->SetOut(LocationFrom(r0));
5085 } else {
5086 locations->SetOut(Location::RequiresRegister());
5087 }
5088}
5089
5090void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
5091 // TODO(VIXL): Implement optimized code paths.
5092  // We implemented the simplest solution to get the first ART tests passing; the optimized
5093  // path is deferred until later and should be implemented using the ARM64 implementation
5094  // as a reference. The same applies to LocationsBuilderARMVIXL::VisitLoadString.
5095
5096 // TODO: Re-add the compiler code to do string dex cache lookup again.
5097 DCHECK_EQ(load->GetLoadKind(), HLoadString::LoadKind::kDexCacheViaMethod);
5098 InvokeRuntimeCallingConventionARMVIXL calling_convention;
5099 __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex());
5100 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5101 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
5102}
5103
5104static int32_t GetExceptionTlsOffset() {
5105 return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
5106}
5107
5108void LocationsBuilderARMVIXL::VisitLoadException(HLoadException* load) {
5109 LocationSummary* locations =
5110 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5111 locations->SetOut(Location::RequiresRegister());
5112}
5113
5114void InstructionCodeGeneratorARMVIXL::VisitLoadException(HLoadException* load) {
5115 vixl32::Register out = OutputRegister(load);
5116 GetAssembler()->LoadFromOffset(kLoadWord, out, tr, GetExceptionTlsOffset());
5117}
5118
5119
5120void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) {
5121 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5122}
5123
5124void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5125 UseScratchRegisterScope temps(GetVIXLAssembler());
5126 vixl32::Register temp = temps.Acquire();
5127 __ Mov(temp, 0);
5128 GetAssembler()->StoreToOffset(kStoreWord, temp, tr, GetExceptionTlsOffset());
5129}
5130
5131void LocationsBuilderARMVIXL::VisitThrow(HThrow* instruction) {
5132 LocationSummary* locations =
5133 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
5134 InvokeRuntimeCallingConventionARMVIXL calling_convention;
5135 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5136}
5137
5138void InstructionCodeGeneratorARMVIXL::VisitThrow(HThrow* instruction) {
5139 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
5140 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
5141}
5142
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005143static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5144 return kEmitCompilerReadBarrier &&
5145 (kUseBakerReadBarrier ||
5146 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5147 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5148 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5149}
5150
Artem Serovcfbe9132016-10-14 15:58:56 +01005151
5152void LocationsBuilderARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
5153 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5154 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5155 bool baker_read_barrier_slow_path = false;
5156 switch (type_check_kind) {
5157 case TypeCheckKind::kExactCheck:
5158 case TypeCheckKind::kAbstractClassCheck:
5159 case TypeCheckKind::kClassHierarchyCheck:
5160 case TypeCheckKind::kArrayObjectCheck:
5161 call_kind =
5162 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5163 baker_read_barrier_slow_path = kUseBakerReadBarrier;
5164 break;
5165 case TypeCheckKind::kArrayCheck:
5166 case TypeCheckKind::kUnresolvedCheck:
5167 case TypeCheckKind::kInterfaceCheck:
5168 call_kind = LocationSummary::kCallOnSlowPath;
5169 break;
5170 }
5171
5172 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5173 if (baker_read_barrier_slow_path) {
5174 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5175 }
5176 locations->SetInAt(0, Location::RequiresRegister());
5177 locations->SetInAt(1, Location::RequiresRegister());
5178 // The "out" register is used as a temporary, so it overlaps with the inputs.
5179  // Note that TypeCheckSlowPathARMVIXL uses this register too.
5180 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5181 // When read barriers are enabled, we need a temporary register for
5182 // some cases.
5183 if (TypeCheckNeedsATemporary(type_check_kind)) {
5184 locations->AddTemp(Location::RequiresRegister());
5185 }
5186}
5187
5188void InstructionCodeGeneratorARMVIXL::VisitInstanceOf(HInstanceOf* instruction) {
5189 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5190 LocationSummary* locations = instruction->GetLocations();
5191 Location obj_loc = locations->InAt(0);
5192 vixl32::Register obj = InputRegisterAt(instruction, 0);
5193 vixl32::Register cls = InputRegisterAt(instruction, 1);
5194 Location out_loc = locations->Out();
5195 vixl32::Register out = OutputRegister(instruction);
5196 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5197 locations->GetTemp(0) :
5198 Location::NoLocation();
5199 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5200 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5201 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5202 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
5203 vixl32::Label done, zero;
5204 SlowPathCodeARMVIXL* slow_path = nullptr;
5205
5206 // Return 0 if `obj` is null.
5207  // Avoid the null check if we know `obj` is not null.
5208 if (instruction->MustDoNullCheck()) {
5209 __ Cbz(obj, &zero);
5210 }
5211
Artem Serovcfbe9132016-10-14 15:58:56 +01005212 switch (type_check_kind) {
5213 case TypeCheckKind::kExactCheck: {
Mathieu Chartier6beced42016-11-15 15:51:31 -08005214 // /* HeapReference<Class> */ out = obj->klass_
5215 GenerateReferenceLoadTwoRegisters(instruction,
5216 out_loc,
5217 obj_loc,
5218 class_offset,
5219 maybe_temp_loc);
Artem Serovcfbe9132016-10-14 15:58:56 +01005220 __ Cmp(out, cls);
5221 // Classes must be equal for the instanceof to succeed.
5222 __ B(ne, &zero);
5223 __ Mov(out, 1);
5224 __ B(&done);
5225 break;
5226 }
5227
5228 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier6beced42016-11-15 15:51:31 -08005229 // /* HeapReference<Class> */ out = obj->klass_
5230 GenerateReferenceLoadTwoRegisters(instruction,
5231 out_loc,
5232 obj_loc,
5233 class_offset,
5234 maybe_temp_loc);
Artem Serovcfbe9132016-10-14 15:58:56 +01005235 // If the class is abstract, we eagerly fetch the super class of the
5236 // object to avoid doing a comparison we know will fail.
5237 vixl32::Label loop;
5238 __ Bind(&loop);
5239 // /* HeapReference<Class> */ out = out->super_class_
5240 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
5241 // If `out` is null, we use it for the result, and jump to `done`.
5242 __ Cbz(out, &done);
5243 __ Cmp(out, cls);
5244 __ B(ne, &loop);
5245 __ Mov(out, 1);
5246 if (zero.IsReferenced()) {
5247 __ B(&done);
5248 }
5249 break;
5250 }
5251
5252 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier6beced42016-11-15 15:51:31 -08005253 // /* HeapReference<Class> */ out = obj->klass_
5254 GenerateReferenceLoadTwoRegisters(instruction,
5255 out_loc,
5256 obj_loc,
5257 class_offset,
5258 maybe_temp_loc);
Artem Serovcfbe9132016-10-14 15:58:56 +01005259 // Walk over the class hierarchy to find a match.
5260 vixl32::Label loop, success;
5261 __ Bind(&loop);
5262 __ Cmp(out, cls);
5263 __ B(eq, &success);
5264 // /* HeapReference<Class> */ out = out->super_class_
5265 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
5266 __ Cbnz(out, &loop);
5267 // If `out` is null, we use it for the result, and jump to `done`.
5268 __ B(&done);
5269 __ Bind(&success);
5270 __ Mov(out, 1);
5271 if (zero.IsReferenced()) {
5272 __ B(&done);
5273 }
5274 break;
5275 }
5276
5277 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier6beced42016-11-15 15:51:31 -08005278 // /* HeapReference<Class> */ out = obj->klass_
5279 GenerateReferenceLoadTwoRegisters(instruction,
5280 out_loc,
5281 obj_loc,
5282 class_offset,
5283 maybe_temp_loc);
Artem Serovcfbe9132016-10-14 15:58:56 +01005284 // Do an exact check.
5285 vixl32::Label exact_check;
5286 __ Cmp(out, cls);
5287 __ B(eq, &exact_check);
5288 // Otherwise, we need to check that the object's class is a non-primitive array.
5289 // /* HeapReference<Class> */ out = out->component_type_
5290 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
5291 // If `out` is null, we use it for the result, and jump to `done`.
5292 __ Cbz(out, &done);
5293 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
5294 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
5295 __ Cbnz(out, &zero);
5296 __ Bind(&exact_check);
5297 __ Mov(out, 1);
5298 __ B(&done);
5299 break;
5300 }
5301
5302 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier6beced42016-11-15 15:51:31 -08005303 // /* HeapReference<Class> */ out = obj->klass_
5304 GenerateReferenceLoadTwoRegisters(instruction,
5305 out_loc,
5306 obj_loc,
5307 class_offset,
5308 maybe_temp_loc);
Artem Serovcfbe9132016-10-14 15:58:56 +01005309 __ Cmp(out, cls);
5310 DCHECK(locations->OnlyCallsOnSlowPath());
5311 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARMVIXL(instruction,
5312 /* is_fatal */ false);
5313 codegen_->AddSlowPath(slow_path);
5314 __ B(ne, slow_path->GetEntryLabel());
5315 __ Mov(out, 1);
5316 if (zero.IsReferenced()) {
5317 __ B(&done);
5318 }
5319 break;
5320 }
5321
5322 case TypeCheckKind::kUnresolvedCheck:
5323 case TypeCheckKind::kInterfaceCheck: {
5324 // Note that we indeed only call on slow path, but we always go
5325 // into the slow path for the unresolved and interface check
5326 // cases.
5327 //
5328 // We cannot directly call the InstanceofNonTrivial runtime
5329 // entry point without resorting to a type checking slow path
5330 // here (i.e. by calling InvokeRuntime directly), as it would
5331 // require to assign fixed registers for the inputs of this
5332 // HInstanceOf instruction (following the runtime calling
5333 // convention), which might be cluttered by the potential first
5334 // read barrier emission at the beginning of this method.
5335 //
5336 // TODO: Introduce a new runtime entry point taking the object
5337 // to test (instead of its class) as argument, and let it deal
5338 // with the read barrier issues. This will let us refactor this
5339 // case of the `switch` code as it was previously (with a direct
5340 // call to the runtime not using a type checking slow path).
5341 // This should also be beneficial for the other cases above.
5342 DCHECK(locations->OnlyCallsOnSlowPath());
5343 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARMVIXL(instruction,
5344 /* is_fatal */ false);
5345 codegen_->AddSlowPath(slow_path);
5346 __ B(slow_path->GetEntryLabel());
5347 if (zero.IsReferenced()) {
5348 __ B(&done);
5349 }
5350 break;
5351 }
5352 }
5353
5354 if (zero.IsReferenced()) {
5355 __ Bind(&zero);
5356 __ Mov(out, 0);
5357 }
5358
5359 if (done.IsReferenced()) {
5360 __ Bind(&done);
5361 }
5362
5363 if (slow_path != nullptr) {
5364 __ Bind(slow_path->GetExitLabel());
5365 }
5366}
5367
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005368void LocationsBuilderARMVIXL::VisitCheckCast(HCheckCast* instruction) {
5369 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5370 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
5371
5372 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5373 switch (type_check_kind) {
5374 case TypeCheckKind::kExactCheck:
5375 case TypeCheckKind::kAbstractClassCheck:
5376 case TypeCheckKind::kClassHierarchyCheck:
5377 case TypeCheckKind::kArrayObjectCheck:
5378 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5379 LocationSummary::kCallOnSlowPath :
5380 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
5381 break;
5382 case TypeCheckKind::kArrayCheck:
5383 case TypeCheckKind::kUnresolvedCheck:
5384 case TypeCheckKind::kInterfaceCheck:
5385 call_kind = LocationSummary::kCallOnSlowPath;
5386 break;
5387 }
5388
5389 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5390 locations->SetInAt(0, Location::RequiresRegister());
5391 locations->SetInAt(1, Location::RequiresRegister());
5392  // Note that TypeCheckSlowPathARMVIXL uses this "temp" register too.
5393 locations->AddTemp(Location::RequiresRegister());
5394 // When read barriers are enabled, we need an additional temporary
5395 // register for some cases.
5396 if (TypeCheckNeedsATemporary(type_check_kind)) {
5397 locations->AddTemp(Location::RequiresRegister());
5398 }
5399}
5400
5401void InstructionCodeGeneratorARMVIXL::VisitCheckCast(HCheckCast* instruction) {
5402 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5403 LocationSummary* locations = instruction->GetLocations();
5404 Location obj_loc = locations->InAt(0);
5405 vixl32::Register obj = InputRegisterAt(instruction, 0);
5406 vixl32::Register cls = InputRegisterAt(instruction, 1);
5407 Location temp_loc = locations->GetTemp(0);
5408 vixl32::Register temp = RegisterFrom(temp_loc);
5409 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5410 locations->GetTemp(1) :
5411 Location::NoLocation();
5412 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Artem Serovcfbe9132016-10-14 15:58:56 +01005413 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5414 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5415 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005416
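  // A fatal (non-returning) slow path can be used only for check kinds whose inline code fully
  // decides the cast (reaching the slow path then means the cast has failed), and only when the
  // exception cannot be thrown into a catch block of this method.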
5417 bool is_type_check_slow_path_fatal =
5418 (type_check_kind == TypeCheckKind::kExactCheck ||
5419 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5420 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5421 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5422 !instruction->CanThrowIntoCatchBlock();
5423 SlowPathCodeARMVIXL* type_check_slow_path =
5424 new (GetGraph()->GetArena()) TypeCheckSlowPathARMVIXL(instruction,
5425 is_type_check_slow_path_fatal);
5426 codegen_->AddSlowPath(type_check_slow_path);
5427
5428 vixl32::Label done;
5429 // Avoid null check if we know obj is not null.
5430 if (instruction->MustDoNullCheck()) {
5431 __ Cbz(obj, &done);
5432 }
5433
5434 // /* HeapReference<Class> */ temp = obj->klass_
5435 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5436
5437 switch (type_check_kind) {
5438 case TypeCheckKind::kExactCheck:
5439 case TypeCheckKind::kArrayCheck: {
5440 __ Cmp(temp, cls);
5441 // Jump to slow path for throwing the exception or doing a
5442 // more involved array check.
5443 __ B(ne, type_check_slow_path->GetEntryLabel());
5444 break;
5445 }
5446
5447 case TypeCheckKind::kAbstractClassCheck: {
Artem Serovcfbe9132016-10-14 15:58:56 +01005448 // If the class is abstract, we eagerly fetch the super class of the
5449 // object to avoid doing a comparison we know will fail.
5450 vixl32::Label loop;
5451 __ Bind(&loop);
5452 // /* HeapReference<Class> */ temp = temp->super_class_
5453 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
5454
5455 // If the class reference currently in `temp` is null, jump to the slow path to throw the
5456 // exception.
5457 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
5458
5459 // Otherwise, compare the classes.
5460 __ Cmp(temp, cls);
5461 __ B(ne, &loop);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005462 break;
5463 }
5464
5465 case TypeCheckKind::kClassHierarchyCheck: {
Artem Serovcfbe9132016-10-14 15:58:56 +01005466 // Walk over the class hierarchy to find a match.
5467 vixl32::Label loop;
5468 __ Bind(&loop);
5469 __ Cmp(temp, cls);
5470 __ B(eq, &done);
5471
5472 // /* HeapReference<Class> */ temp = temp->super_class_
5473 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
5474
5475 // If the class reference currently in `temp` is null, jump to the slow path to throw the
5476 // exception.
5477 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
5478 // Otherwise, jump to the beginning of the loop.
5479 __ B(&loop);
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005480 break;
5481 }
5482
Artem Serovcfbe9132016-10-14 15:58:56 +01005483 case TypeCheckKind::kArrayObjectCheck: {
5484 // Do an exact check.
5485 __ Cmp(temp, cls);
5486 __ B(eq, &done);
5487
5488 // Otherwise, we need to check that the object's class is a non-primitive array.
5489 // /* HeapReference<Class> */ temp = temp->component_type_
5490 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
5491 // If the component type is null, jump to the slow path to throw the exception.
5492 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
5493      // Otherwise, the object is indeed an array. Further check that its component type is
5494      // not a primitive type; if it is, jump to the slow path to throw the exception.
5495 GetAssembler()->LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
5496 static_assert(Primitive::kPrimNot == 0, "Expected 0 for art::Primitive::kPrimNot");
5497 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005498 break;
5499 }
5500
5501 case TypeCheckKind::kUnresolvedCheck:
5502 case TypeCheckKind::kInterfaceCheck:
Artem Serovcfbe9132016-10-14 15:58:56 +01005503 // We always go into the type check slow path for the unresolved
5504 // and interface check cases.
5505 //
5506 // We cannot directly call the CheckCast runtime entry point
5507 // without resorting to a type checking slow path here (i.e. by
5508      // calling InvokeRuntime directly), as it would require assigning
5509      // fixed registers for the inputs of this HCheckCast
5510 // instruction (following the runtime calling convention), which
5511 // might be cluttered by the potential first read barrier
5512 // emission at the beginning of this method.
5513 __ B(type_check_slow_path->GetEntryLabel());
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005514 break;
5515 }
5516 __ Bind(&done);
5517
5518 __ Bind(type_check_slow_path->GetExitLabel());
5519}
5520
Artem Serov551b28f2016-10-18 19:11:30 +01005521void LocationsBuilderARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
5522 LocationSummary* locations =
5523 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
5524 InvokeRuntimeCallingConventionARMVIXL calling_convention;
5525 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5526}
5527
5528void InstructionCodeGeneratorARMVIXL::VisitMonitorOperation(HMonitorOperation* instruction) {
5529 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
5530 instruction,
5531 instruction->GetDexPc());
5532 if (instruction->IsEnter()) {
5533 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5534 } else {
5535 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5536 }
5537}
5538
Artem Serov02109dd2016-09-23 17:17:54 +01005539void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
5540 HandleBitwiseOperation(instruction, AND);
5541}
5542
5543void LocationsBuilderARMVIXL::VisitOr(HOr* instruction) {
5544 HandleBitwiseOperation(instruction, ORR);
5545}
5546
5547void LocationsBuilderARMVIXL::VisitXor(HXor* instruction) {
5548 HandleBitwiseOperation(instruction, EOR);
5549}
5550
5551void LocationsBuilderARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
5552 LocationSummary* locations =
5553 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5554 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5555 || instruction->GetResultType() == Primitive::kPrimLong);
5556 // Note: GVN reorders commutative operations to have the constant on the right hand side.
5557 locations->SetInAt(0, Location::RequiresRegister());
5558 locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
5559 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5560}
5561
5562void InstructionCodeGeneratorARMVIXL::VisitAnd(HAnd* instruction) {
5563 HandleBitwiseOperation(instruction);
5564}
5565
5566void InstructionCodeGeneratorARMVIXL::VisitOr(HOr* instruction) {
5567 HandleBitwiseOperation(instruction);
5568}
5569
5570void InstructionCodeGeneratorARMVIXL::VisitXor(HXor* instruction) {
5571 HandleBitwiseOperation(instruction);
5572}
5573
Artem Serov2bbc9532016-10-21 11:51:50 +01005574void LocationsBuilderARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
5575 LocationSummary* locations =
5576 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5577 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5578 || instruction->GetResultType() == Primitive::kPrimLong);
5579
5580 locations->SetInAt(0, Location::RequiresRegister());
5581 locations->SetInAt(1, Location::RequiresRegister());
5582 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5583}
5584
5585void InstructionCodeGeneratorARMVIXL::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
5586 LocationSummary* locations = instruction->GetLocations();
5587 Location first = locations->InAt(0);
5588 Location second = locations->InAt(1);
5589 Location out = locations->Out();
5590
5591 if (instruction->GetResultType() == Primitive::kPrimInt) {
5592 vixl32::Register first_reg = RegisterFrom(first);
5593 vixl32::Register second_reg = RegisterFrom(second);
5594 vixl32::Register out_reg = RegisterFrom(out);
5595
5596 switch (instruction->GetOpKind()) {
5597 case HInstruction::kAnd:
5598 __ Bic(out_reg, first_reg, second_reg);
5599 break;
5600 case HInstruction::kOr:
5601 __ Orn(out_reg, first_reg, second_reg);
5602 break;
5603 // There is no EON on arm.
5604 case HInstruction::kXor:
5605 default:
5606 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
5607 UNREACHABLE();
5608 }
5609 return;
5610
5611 } else {
5612 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
5613 vixl32::Register first_low = LowRegisterFrom(first);
5614 vixl32::Register first_high = HighRegisterFrom(first);
5615 vixl32::Register second_low = LowRegisterFrom(second);
5616 vixl32::Register second_high = HighRegisterFrom(second);
5617 vixl32::Register out_low = LowRegisterFrom(out);
5618 vixl32::Register out_high = HighRegisterFrom(out);
5619
5620 switch (instruction->GetOpKind()) {
5621 case HInstruction::kAnd:
5622 __ Bic(out_low, first_low, second_low);
5623 __ Bic(out_high, first_high, second_high);
5624 break;
5625 case HInstruction::kOr:
5626 __ Orn(out_low, first_low, second_low);
5627 __ Orn(out_high, first_high, second_high);
5628 break;
5629 // There is no EON on arm.
5630 case HInstruction::kXor:
5631 default:
5632 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
5633 UNREACHABLE();
5634 }
5635 }
5636}
5637
Artem Serov02109dd2016-09-23 17:17:54 +01005638// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
5639void InstructionCodeGeneratorARMVIXL::GenerateAndConst(vixl32::Register out,
5640 vixl32::Register first,
5641 uint32_t value) {
5642  // Optimize special cases for individual halves of `and-long` (`and` is simplified earlier).
5643 if (value == 0xffffffffu) {
5644 if (!out.Is(first)) {
5645 __ Mov(out, first);
5646 }
5647 return;
5648 }
5649 if (value == 0u) {
5650 __ Mov(out, 0);
5651 return;
5652 }
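  // An AND immediate that cannot be encoded can instead be emitted as BIC with the
  // bitwise-complemented immediate, which clears exactly the same bits.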
5653 if (GetAssembler()->ShifterOperandCanHold(AND, value)) {
5654 __ And(out, first, value);
5655 } else {
5656 DCHECK(GetAssembler()->ShifterOperandCanHold(BIC, ~value));
5657 __ Bic(out, first, ~value);
5658 }
5659}
5660
5661// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
5662void InstructionCodeGeneratorARMVIXL::GenerateOrrConst(vixl32::Register out,
5663 vixl32::Register first,
5664 uint32_t value) {
5665  // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
5666 if (value == 0u) {
5667 if (!out.Is(first)) {
5668 __ Mov(out, first);
5669 }
5670 return;
5671 }
5672 if (value == 0xffffffffu) {
5673 __ Mvn(out, 0);
5674 return;
5675 }
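  // Likewise, an ORR immediate that cannot be encoded can be emitted as ORN with the
  // bitwise-complemented immediate.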
5676 if (GetAssembler()->ShifterOperandCanHold(ORR, value)) {
5677 __ Orr(out, first, value);
5678 } else {
5679 DCHECK(GetAssembler()->ShifterOperandCanHold(ORN, ~value));
5680 __ Orn(out, first, ~value);
5681 }
5682}
5683
5684// TODO(VIXL): Remove optimizations in the helper when they are implemented in vixl.
5685void InstructionCodeGeneratorARMVIXL::GenerateEorConst(vixl32::Register out,
5686 vixl32::Register first,
5687 uint32_t value) {
5688  // Optimize special case for individual halves of `xor-long` (`xor` is simplified earlier).
5689 if (value == 0u) {
5690 if (!out.Is(first)) {
5691 __ Mov(out, first);
5692 }
5693 return;
5694 }
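  // There is no EOR variant taking a complemented immediate (no EON on arm), so emit EOR
  // directly and let the macro assembler handle immediates that cannot be encoded.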
5695 __ Eor(out, first, value);
5696}
5697
5698void InstructionCodeGeneratorARMVIXL::HandleBitwiseOperation(HBinaryOperation* instruction) {
5699 LocationSummary* locations = instruction->GetLocations();
5700 Location first = locations->InAt(0);
5701 Location second = locations->InAt(1);
5702 Location out = locations->Out();
5703
5704 if (second.IsConstant()) {
5705 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
5706 uint32_t value_low = Low32Bits(value);
5707 if (instruction->GetResultType() == Primitive::kPrimInt) {
5708 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
5709 vixl32::Register out_reg = OutputRegister(instruction);
5710 if (instruction->IsAnd()) {
5711 GenerateAndConst(out_reg, first_reg, value_low);
5712 } else if (instruction->IsOr()) {
5713 GenerateOrrConst(out_reg, first_reg, value_low);
5714 } else {
5715 DCHECK(instruction->IsXor());
5716 GenerateEorConst(out_reg, first_reg, value_low);
5717 }
5718 } else {
5719 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
5720 uint32_t value_high = High32Bits(value);
5721 vixl32::Register first_low = LowRegisterFrom(first);
5722 vixl32::Register first_high = HighRegisterFrom(first);
5723 vixl32::Register out_low = LowRegisterFrom(out);
5724 vixl32::Register out_high = HighRegisterFrom(out);
5725 if (instruction->IsAnd()) {
5726 GenerateAndConst(out_low, first_low, value_low);
5727 GenerateAndConst(out_high, first_high, value_high);
5728 } else if (instruction->IsOr()) {
5729 GenerateOrrConst(out_low, first_low, value_low);
5730 GenerateOrrConst(out_high, first_high, value_high);
5731 } else {
5732 DCHECK(instruction->IsXor());
5733 GenerateEorConst(out_low, first_low, value_low);
5734 GenerateEorConst(out_high, first_high, value_high);
5735 }
5736 }
5737 return;
5738 }
5739
5740 if (instruction->GetResultType() == Primitive::kPrimInt) {
5741 vixl32::Register first_reg = InputRegisterAt(instruction, 0);
5742 vixl32::Register second_reg = InputRegisterAt(instruction, 1);
5743 vixl32::Register out_reg = OutputRegister(instruction);
5744 if (instruction->IsAnd()) {
5745 __ And(out_reg, first_reg, second_reg);
5746 } else if (instruction->IsOr()) {
5747 __ Orr(out_reg, first_reg, second_reg);
5748 } else {
5749 DCHECK(instruction->IsXor());
5750 __ Eor(out_reg, first_reg, second_reg);
5751 }
5752 } else {
5753 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
5754 vixl32::Register first_low = LowRegisterFrom(first);
5755 vixl32::Register first_high = HighRegisterFrom(first);
5756 vixl32::Register second_low = LowRegisterFrom(second);
5757 vixl32::Register second_high = HighRegisterFrom(second);
5758 vixl32::Register out_low = LowRegisterFrom(out);
5759 vixl32::Register out_high = HighRegisterFrom(out);
5760 if (instruction->IsAnd()) {
5761 __ And(out_low, first_low, second_low);
5762 __ And(out_high, first_high, second_high);
5763 } else if (instruction->IsOr()) {
5764 __ Orr(out_low, first_low, second_low);
5765 __ Orr(out_high, first_high, second_high);
5766 } else {
5767 DCHECK(instruction->IsXor());
5768 __ Eor(out_low, first_low, second_low);
5769 __ Eor(out_high, first_high, second_high);
5770 }
5771 }
5772}
5773
Artem Serovcfbe9132016-10-14 15:58:56 +01005774void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadOneRegister(
5775 HInstruction* instruction ATTRIBUTE_UNUSED,
5776 Location out,
5777 uint32_t offset,
5778 Location maybe_temp ATTRIBUTE_UNUSED) {
5779 vixl32::Register out_reg = RegisterFrom(out);
5780 if (kEmitCompilerReadBarrier) {
5781 TODO_VIXL32(FATAL);
5782 } else {
5783 // Plain load with no read barrier.
5784 // /* HeapReference<Object> */ out = *(out + offset)
5785 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
5786 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5787 }
5788}
5789
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005790void InstructionCodeGeneratorARMVIXL::GenerateReferenceLoadTwoRegisters(
5791 HInstruction* instruction ATTRIBUTE_UNUSED,
5792 Location out,
5793 Location obj,
5794 uint32_t offset,
5795 Location maybe_temp ATTRIBUTE_UNUSED) {
5796 vixl32::Register out_reg = RegisterFrom(out);
5797 vixl32::Register obj_reg = RegisterFrom(obj);
5798 if (kEmitCompilerReadBarrier) {
5799 TODO_VIXL32(FATAL);
5800 } else {
5801 // Plain load with no read barrier.
5802 // /* HeapReference<Object> */ out = *(obj + offset)
5803 GetAssembler()->LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
5804 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5805 }
5806}
5807
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005808void InstructionCodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
5809 HInstruction* instruction ATTRIBUTE_UNUSED,
5810 Location root,
5811 vixl32::Register obj,
5812 uint32_t offset,
5813 bool requires_read_barrier) {
5814 vixl32::Register root_reg = RegisterFrom(root);
5815 if (requires_read_barrier) {
5816 TODO_VIXL32(FATAL);
5817 } else {
5818 // Plain GC root load with no read barrier.
5819 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
5820 GetAssembler()->LoadFromOffset(kLoadWord, root_reg, obj, offset);
5821 // Note that GC roots are not affected by heap poisoning, thus we
5822 // do not have to unpoison `root_reg` here.
5823 }
5824}
5825
Roland Levillain6070e882016-11-03 17:51:58 +00005826void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(
5827 HInstruction* instruction ATTRIBUTE_UNUSED,
5828 Location ref ATTRIBUTE_UNUSED,
5829 vixl::aarch32::Register obj ATTRIBUTE_UNUSED,
5830 uint32_t offset ATTRIBUTE_UNUSED,
5831 Location temp ATTRIBUTE_UNUSED,
5832 bool needs_null_check ATTRIBUTE_UNUSED) {
5833 TODO_VIXL32(FATAL);
5834}
5835
5836void CodeGeneratorARMVIXL::GenerateReferenceLoadWithBakerReadBarrier(
5837 HInstruction* instruction ATTRIBUTE_UNUSED,
5838 Location ref ATTRIBUTE_UNUSED,
5839 vixl::aarch32::Register obj ATTRIBUTE_UNUSED,
5840 uint32_t offset ATTRIBUTE_UNUSED,
5841 Location index ATTRIBUTE_UNUSED,
5842 ScaleFactor scale_factor ATTRIBUTE_UNUSED,
5843 Location temp ATTRIBUTE_UNUSED,
5844 bool needs_null_check ATTRIBUTE_UNUSED,
5845 bool always_update_field ATTRIBUTE_UNUSED,
5846 vixl::aarch32::Register* temp2 ATTRIBUTE_UNUSED) {
5847 TODO_VIXL32(FATAL);
5848}
5849
Roland Levillain844e6532016-11-03 16:09:47 +00005850void CodeGeneratorARMVIXL::GenerateReadBarrierSlow(HInstruction* instruction ATTRIBUTE_UNUSED,
5851 Location out ATTRIBUTE_UNUSED,
5852 Location ref ATTRIBUTE_UNUSED,
5853 Location obj ATTRIBUTE_UNUSED,
5854 uint32_t offset ATTRIBUTE_UNUSED,
5855 Location index ATTRIBUTE_UNUSED) {
5856 TODO_VIXL32(FATAL);
5857}
5858
Artem Serov02d37832016-10-25 15:25:33 +01005859void CodeGeneratorARMVIXL::MaybeGenerateReadBarrierSlow(HInstruction* instruction ATTRIBUTE_UNUSED,
5860 Location out,
5861 Location ref ATTRIBUTE_UNUSED,
5862 Location obj ATTRIBUTE_UNUSED,
5863 uint32_t offset ATTRIBUTE_UNUSED,
5864 Location index ATTRIBUTE_UNUSED) {
5865 if (kEmitCompilerReadBarrier) {
5866 DCHECK(!kUseBakerReadBarrier);
5867 TODO_VIXL32(FATAL);
5868 } else if (kPoisonHeapReferences) {
5869 GetAssembler()->UnpoisonHeapReference(RegisterFrom(out));
5870 }
5871}
5872
5873// Check if the desired_dispatch_info is supported. If it is, return it,
5874// otherwise return a fall-back info that should be used instead.
5875HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
5876 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info ATTRIBUTE_UNUSED,
5877 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
5878 // TODO(VIXL): Implement optimized code paths.
5879 return {
5880 HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
5881 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
5882 0u,
5883 0u
5884 };
5885}
5886
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005887vixl32::Register CodeGeneratorARMVIXL::GetInvokeStaticOrDirectExtraParameter(
5888 HInvokeStaticOrDirect* invoke, vixl32::Register temp) {
5889 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5890 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
5891 if (!invoke->GetLocations()->Intrinsified()) {
5892 return RegisterFrom(location);
5893 }
5894 // For intrinsics we allow any location, so it may be on the stack.
5895 if (!location.IsRegister()) {
5896 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, location.GetStackIndex());
5897 return temp;
5898 }
5899 // For register locations, check if the register was saved. If so, get it from the stack.
5900 // Note: There is a chance that the register was saved but not overwritten, so we could
5901  // save one load. However, since this is just an intrinsic slow path, we prefer this
5902  // simple and more robust approach rather than trying to determine if that's the case.
5903 SlowPathCode* slow_path = GetCurrentSlowPath();
5904 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
5905 if (slow_path->IsCoreRegisterSaved(RegisterFrom(location).GetCode())) {
5906 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(RegisterFrom(location).GetCode());
5907 GetAssembler()->LoadFromOffset(kLoadWord, temp, sp, stack_offset);
5908 return temp;
5909 }
5910 return RegisterFrom(location);
5911}
5912
5913void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
5914 HInvokeStaticOrDirect* invoke, Location temp) {
5915 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
5916 vixl32::Register temp_reg = RegisterFrom(temp);
5917
5918 switch (invoke->GetMethodLoadKind()) {
5919 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
5920 uint32_t offset =
5921 GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
5922 // temp = thread->string_init_entrypoint
5923 GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, tr, offset);
5924 break;
5925 }
5926 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
5927 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
5928 vixl32::Register method_reg;
5929 if (current_method.IsRegister()) {
5930 method_reg = RegisterFrom(current_method);
5931 } else {
Anton Kirilove28d9ae2016-10-25 18:17:23 +01005932 DCHECK(invoke->GetLocations()->Intrinsified());
5933 DCHECK(!current_method.IsValid());
5934 method_reg = temp_reg;
5935 GetAssembler()->LoadFromOffset(kLoadWord, temp_reg, sp, kCurrentMethodStackOffset);
Scott Wakelinga7812ae2016-10-17 10:03:36 +01005936 }
5937 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
5938 GetAssembler()->LoadFromOffset(
5939 kLoadWord,
5940 temp_reg,
5941 method_reg,
5942 ArtMethod::DexCacheResolvedMethodsOffset(kArmPointerSize).Int32Value());
5943 // temp = temp[index_in_cache];
5944 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
5945 uint32_t index_in_cache = invoke->GetDexMethodIndex();
5946 GetAssembler()->LoadFromOffset(
5947 kLoadWord, temp_reg, temp_reg, CodeGenerator::GetCachePointerOffset(index_in_cache));
5948 break;
5949 }
5950 default:
5951 TODO_VIXL32(FATAL);
5952 }
5953
5954 // TODO(VIXL): Support `CodePtrLocation` values other than `kCallArtMethod`.
5955 if (invoke->GetCodePtrLocation() != HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod) {
5956 TODO_VIXL32(FATAL);
5957 }
5958
5959 // LR = callee_method->entry_point_from_quick_compiled_code_
5960 GetAssembler()->LoadFromOffset(
5961 kLoadWord,
5962 lr,
5963 RegisterFrom(callee_method),
5964 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
5965 // LR()
5966 __ Blx(lr);
5967
5968 DCHECK(!IsLeafMethod());
5969}
5970
5971void CodeGeneratorARMVIXL::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
5972 vixl32::Register temp = RegisterFrom(temp_location);
5973 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5974 invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
5975
5976 // Use the calling convention instead of the location of the receiver, as
5977 // intrinsics may have put the receiver in a different register. In the intrinsics
5978 // slow path, the arguments have been moved to the right place, so here we are
5979 // guaranteed that the receiver is the first register of the calling convention.
5980 InvokeDexCallingConventionARMVIXL calling_convention;
5981 vixl32::Register receiver = calling_convention.GetRegisterAt(0);
5982 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5983 // /* HeapReference<Class> */ temp = receiver->klass_
5984 GetAssembler()->LoadFromOffset(kLoadWord, temp, receiver, class_offset);
5985 MaybeRecordImplicitNullCheck(invoke);
5986 // Instead of simply (possibly) unpoisoning `temp` here, we should
5987 // emit a read barrier for the previous class reference load.
5988  // However, this is not required in practice, as this is an
5989  // intermediate/temporary reference and because the current
5990  // concurrent copying collector keeps the from-space memory
5991  // intact/accessible until the end of the marking phase (future
5992  // collectors may not).
5993 GetAssembler()->MaybeUnpoisonHeapReference(temp);
5994
5995 // temp = temp->GetMethodAt(method_offset);
5996 uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
5997 kArmPointerSize).Int32Value();
5998 GetAssembler()->LoadFromOffset(kLoadWord, temp, temp, method_offset);
5999 // LR = temp->GetEntryPoint();
6000 GetAssembler()->LoadFromOffset(kLoadWord, lr, temp, entry_point);
6001 // LR();
6002 __ Blx(lr);
6003}
6004
Artem Serov2bbc9532016-10-21 11:51:50 +01006005void LocationsBuilderARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
6006 LocationSummary* locations =
6007 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
6008 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
6009 Location::RequiresRegister());
6010 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
6011 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
6012 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6013}
6014
6015void InstructionCodeGeneratorARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
6016 vixl32::Register res = OutputRegister(instr);
6017 vixl32::Register accumulator =
6018 InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
6019 vixl32::Register mul_left =
6020 InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
6021 vixl32::Register mul_right =
6022 InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
6023
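  // MLA computes accumulator + (mul_left * mul_right); MLS computes
  // accumulator - (mul_left * mul_right).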
6024 if (instr->GetOpKind() == HInstruction::kAdd) {
6025 __ Mla(res, mul_left, mul_right, accumulator);
6026 } else {
6027 __ Mls(res, mul_left, mul_right, accumulator);
6028 }
6029}
6030
Artem Serov551b28f2016-10-18 19:11:30 +01006031void LocationsBuilderARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6032 // Nothing to do, this should be removed during prepare for register allocator.
6033 LOG(FATAL) << "Unreachable";
6034}
6035
6036void InstructionCodeGeneratorARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6037 // Nothing to do, this should be removed during prepare for register allocator.
6038 LOG(FATAL) << "Unreachable";
6039}
6040
6041// Simple implementation of packed switch - generate cascaded compare/jumps.
6042void LocationsBuilderARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6043 LocationSummary* locations =
6044 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6045 locations->SetInAt(0, Location::RequiresRegister());
6046 if (switch_instr->GetNumEntries() > kPackedSwitchCompareJumpThreshold &&
6047 codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
6048 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the table base.
6049 if (switch_instr->GetStartValue() != 0) {
6050 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the bias.
6051 }
6052 }
6053}
6054
6055 // TODO(VIXL): Investigate and reach parity with the old arm codegen.
6056void InstructionCodeGeneratorARMVIXL::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6057 int32_t lower_bound = switch_instr->GetStartValue();
6058 uint32_t num_entries = switch_instr->GetNumEntries();
6059 LocationSummary* locations = switch_instr->GetLocations();
6060 vixl32::Register value_reg = InputRegisterAt(switch_instr, 0);
6061 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6062
6063 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
6064 !codegen_->GetAssembler()->GetVIXLAssembler()->IsUsingT32()) {
6065 // Create a series of compare/jumps.
6066 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
6067 vixl32::Register temp_reg = temps.Acquire();
6068    // Note: It is fine for the Adds() below to use the IP register to temporarily store
6069    // the immediate, because IP is used as the destination register. For the other
6070    // Adds() and the Cmp(), the immediate values are constant, and they can be encoded
6071    // in the instruction without making use of the IP register.
6072 __ Adds(temp_reg, value_reg, -lower_bound);
6073
6074 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6075 // Jump to successors[0] if value == lower_bound.
6076 __ B(eq, codegen_->GetLabelOf(successors[0]));
6077 int32_t last_index = 0;
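    // Handle the remaining cases two at a time. After the Adds below, LO (value <
    // case_value[last_index + 2]) can only mean value == case_value[last_index + 1], since all
    // smaller case values have already been handled; EQ means value == case_value[last_index + 2].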
6078 for (; num_entries - last_index > 2; last_index += 2) {
6079 __ Adds(temp_reg, temp_reg, -2);
6080 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
6081 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
6082 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
6083 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
6084 }
6085 if (num_entries - last_index == 2) {
6086 // The last missing case_value.
6087 __ Cmp(temp_reg, 1);
6088 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
6089 }
6090
6091 // And the default for any other value.
6092 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6093 __ B(codegen_->GetLabelOf(default_block));
6094 }
6095 } else {
6096 // Create a table lookup.
6097 vixl32::Register table_base = RegisterFrom(locations->GetTemp(0));
6098
6099 JumpTableARMVIXL* jump_table = codegen_->CreateJumpTable(switch_instr);
6100
6101 // Remove the bias.
6102 vixl32::Register key_reg;
6103 if (lower_bound != 0) {
6104 key_reg = RegisterFrom(locations->GetTemp(1));
6105 __ Sub(key_reg, value_reg, lower_bound);
6106 } else {
6107 key_reg = value_reg;
6108 }
6109
6110    // Check whether the value is in the table; jump to the default block if not.
6111 __ Cmp(key_reg, num_entries - 1);
6112 __ B(hi, codegen_->GetLabelOf(default_block));
6113
6114 UseScratchRegisterScope temps(GetAssembler()->GetVIXLAssembler());
6115 vixl32::Register jump_offset = temps.Acquire();
6116
6117 // Load jump offset from the table.
6118 __ Adr(table_base, jump_table->GetTableStartLabel());
6119 __ Ldr(jump_offset, MemOperand(table_base, key_reg, vixl32::LSL, 2));
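    // Each table entry is a 4-byte offset (hence the LSL #2 scaling of the key) relative to the
    // start of the table.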
6120
6121    // Jump to the target block by branching to table_base (PC-relative) + offset.
6122 vixl32::Register target_address = table_base;
6123 __ Add(target_address, table_base, jump_offset);
6124 __ Bx(target_address);
Artem Serov09a940d2016-11-11 16:15:11 +00006125
6126 jump_table->EmitTable(codegen_);
Artem Serov551b28f2016-10-18 19:11:30 +01006127 }
6128}
6129
Artem Serov02d37832016-10-25 15:25:33 +01006130// Copy the result of a call into the given target.
Anton Kirilove28d9ae2016-10-25 18:17:23 +01006131void CodeGeneratorARMVIXL::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6132 if (!trg.IsValid()) {
6133 DCHECK_EQ(type, Primitive::kPrimVoid);
6134 return;
6135 }
6136
6137 DCHECK_NE(type, Primitive::kPrimVoid);
6138
6139 Location return_loc = InvokeDexCallingConventionVisitorARM().GetReturnLocation(type);
6140 if (return_loc.Equals(trg)) {
6141 return;
6142 }
6143
6144 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
6145 // with the last branch.
6146 if (type == Primitive::kPrimLong) {
6147 TODO_VIXL32(FATAL);
6148 } else if (type == Primitive::kPrimDouble) {
6149 TODO_VIXL32(FATAL);
6150 } else {
6151 // Let the parallel move resolver take care of all of this.
6152 HParallelMove parallel_move(GetGraph()->GetArena());
6153 parallel_move.AddMove(return_loc, trg, type, nullptr);
6154 GetMoveResolver()->EmitNativeCode(&parallel_move);
6155 }
Scott Wakelinga7812ae2016-10-17 10:03:36 +01006156}
Scott Wakelingfe885462016-09-22 10:24:38 +01006157
Artem Serov551b28f2016-10-18 19:11:30 +01006158void LocationsBuilderARMVIXL::VisitClassTableGet(
6159 HClassTableGet* instruction ATTRIBUTE_UNUSED) {
6160 TODO_VIXL32(FATAL);
6161}
6162
6163void InstructionCodeGeneratorARMVIXL::VisitClassTableGet(
6164 HClassTableGet* instruction ATTRIBUTE_UNUSED) {
6165 TODO_VIXL32(FATAL);
6166}
6167
6168
Scott Wakelingfe885462016-09-22 10:24:38 +01006169#undef __
6170#undef QUICK_ENTRY_POINT
6171#undef TODO_VIXL32
6172
6173} // namespace arm
6174} // namespace art