/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::Is64BitType;
using helpers::IsFPType;
using helpers::IsIntegralType;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}


  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    (1 << LR),
                    0,
                    compiler_options),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->SwapLocations(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::RestoreScratch(int reg) {
  __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void ParallelMoveResolverARM64::SpillScratch(int reg) {
  __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  int frame_size = GetFrameSize();
  __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(GetFramePreservedRegisters(), frame_size - FrameEntrySpillSize());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  __ PeekCPURegList(GetFramePreservedRegisters(), frame_size - FrameEntrySpillSize());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

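// Write barrier: if `value` is not null, mark the card table entry covering `object`
// so the garbage collector knows this heap region may hold a cross-region reference.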
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline ATTRIBUTE_UNUSED) const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   tr
  //   lr
  //   sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
574 while (!reserved_core_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant() || constant->IsLongConstant()) {
    __ Mov(Register(destination),
           constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                     : constant->AsLongConstant()->GetValue());
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
           (destination.IsRegister() && !IsFPType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }

  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
             (source.IsFpuRegister() == IsFPType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

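// Swaps the contents of two locations (core registers, FP registers or stack slots)
// using scratch registers; constant locations are not supported.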
void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    Register r1 = XRegisterFrom(loc1);
    Register r2 = XRegisterFrom(loc2);
    Register tmp = temps.AcquireSameSizeAs(r1);
    __ Mov(tmp, r2);
    __ Mov(r2, r1);
    __ Mov(r1, tmp);
  } else if (is_fp_reg2 && is_fp_reg1) {
    FPRegister r1 = DRegisterFrom(loc1);
    FPRegister r2 = DRegisterFrom(loc2);
    FPRegister tmp = temps.AcquireSameSizeAs(r1);
    __ Fmov(tmp, r2);
    __ Fmov(r2, r1);
    __ Fmov(r1, tmp);
  } else if (is_slot1 != is_slot2) {
    MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
    Location reg_loc = is_slot1 ? loc2 : loc1;
    CPURegister reg, tmp;
    if (reg_loc.IsFpuRegister()) {
      reg = DRegisterFrom(reg_loc);
      tmp = temps.AcquireD();
    } else {
      reg = XRegisterFrom(reg_loc);
      tmp = temps.AcquireX();
    }
    __ Ldr(tmp, mem);
    __ Str(reg, mem);
    if (reg_loc.IsFpuRegister()) {
      __ Fmov(FPRegister(reg), FPRegister(tmp));
    } else {
      __ Mov(Register(reg), Register(tmp));
    }
  } else if (is_slot1 && is_slot2) {
    MemOperand mem1 = StackOperandFrom(loc1);
    MemOperand mem2 = StackOperandFrom(loc2);
    Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
    Register tmp2 = temps.AcquireSameSizeAs(tmp1);
    __ Ldr(tmp1, mem1);
    __ Ldr(tmp2, mem2);
    __ Str(tmp1, mem2);
    __ Str(tmp2, mem1);
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

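// Loads with acquire semantics (Ldar and friends). The effective address is first
// materialized into a scratch register because the acquire instructions only accept
// a plain [base] addressing mode; the implicit null check, if any, is recorded
// immediately after the load instruction.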
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

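// Stores with release semantics (Stlr and friends). As in LoadAcquire, the address is
// materialized into a scratch base register first, since the release instructions only
// support a plain [base] addressing mode.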
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc);
    DCHECK(instruction->IsSuspendCheck()
           || instruction->IsBoundsCheck()
           || instruction->IsNullCheck()
           || instruction->IsDivZeroCheck()
           || !IsLeafMethod());
  }
}

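// Jumps to the slow path unless `class_reg` holds a class whose status is at least
// kStatusInitialized. The status is read with acquire semantics (or followed by a Dmb
// in the non-acquire path) so that fields written by the class initializer are visible.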
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (kUseAcquireRelease) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

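// Constant shift amounts are masked to the operand width (kMaxIntShiftValue /
// kMaxLongShiftValue), matching Java shift semantics; register-specified shifts rely on
// the AArch64 variable-shift instructions performing the same modulo reduction.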
1150void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1151 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1152
1153 Primitive::Type type = instr->GetType();
1154 switch (type) {
1155 case Primitive::kPrimInt:
1156 case Primitive::kPrimLong: {
1157 Register dst = OutputRegister(instr);
1158 Register lhs = InputRegisterAt(instr, 0);
1159 Operand rhs = InputOperandAt(instr, 1);
1160 if (rhs.IsImmediate()) {
1161 uint32_t shift_value = (type == Primitive::kPrimInt)
1162 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1163 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1164 if (instr->IsShl()) {
1165 __ Lsl(dst, lhs, shift_value);
1166 } else if (instr->IsShr()) {
1167 __ Asr(dst, lhs, shift_value);
1168 } else {
1169 __ Lsr(dst, lhs, shift_value);
1170 }
1171 } else {
1172 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1173
1174 if (instr->IsShl()) {
1175 __ Lsl(dst, lhs, rhs_reg);
1176 } else if (instr->IsShr()) {
1177 __ Asr(dst, lhs, rhs_reg);
1178 } else {
1179 __ Lsr(dst, lhs, rhs_reg);
1180 }
1181 }
1182 break;
1183 }
1184 default:
1185 LOG(FATAL) << "Unexpected shift operation type " << type;
1186 }
1187}
1188
Alexandre Rames5319def2014-10-23 10:03:10 +01001189void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001190 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001191}
1192
1193void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001194 HandleBinaryOp(instruction);
1195}
1196
1197void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1198 HandleBinaryOp(instruction);
1199}
1200
1201void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1202 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001203}
1204
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001205void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1206 LocationSummary* locations =
1207 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1208 locations->SetInAt(0, Location::RequiresRegister());
1209 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1210 locations->SetOut(Location::RequiresRegister());
1211}
1212
1213void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1214 LocationSummary* locations = instruction->GetLocations();
1215 Primitive::Type type = instruction->GetType();
1216 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001217 Location index = locations->InAt(1);
1218 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001219 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001220 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001221
1222 if (index.IsConstant()) {
1223 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001224 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001225 } else {
1226 Register temp = temps.AcquireSameSizeAs(obj);
1227 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
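      // Compute the element base address: obj + (index << component size shift).
      // The data offset is folded into the memory operand below.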
1228 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001229 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001230 }
1231
Alexandre Rames67555f72014-11-18 10:55:16 +00001232 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001233 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001234}
1235
Alexandre Rames5319def2014-10-23 10:03:10 +01001236void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1237 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1238 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001239 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001240}
1241
1242void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1243 __ Ldr(OutputRegister(instruction),
1244 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001245 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001246}
1247
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001248void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1249 Primitive::Type value_type = instruction->GetComponentType();
1250 bool is_object = value_type == Primitive::kPrimNot;
1251 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1252 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1253 if (is_object) {
1254 InvokeRuntimeCallingConvention calling_convention;
1255 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1256 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1257 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1258 } else {
1259 locations->SetInAt(0, Location::RequiresRegister());
1260 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1261 locations->SetInAt(2, Location::RequiresRegister());
1262 }
1263}
1264
1265void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1266 Primitive::Type value_type = instruction->GetComponentType();
1267 if (value_type == Primitive::kPrimNot) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001268 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001269 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001270 } else {
1271 LocationSummary* locations = instruction->GetLocations();
1272 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001273 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001274 Location index = locations->InAt(1);
1275 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001276 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001277 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001278
1279 if (index.IsConstant()) {
1280 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001281 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001282 } else {
1283 Register temp = temps.AcquireSameSizeAs(obj);
1284 Register index_reg = InputRegisterAt(instruction, 1);
1285 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001286 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001287 }
1288
1289 codegen_->Store(value_type, value, destination);
Calin Juravle77520bc2015-01-12 18:45:46 +00001290 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001291 }
1292}
1293
Alexandre Rames67555f72014-11-18 10:55:16 +00001294void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1295 LocationSummary* locations =
1296 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1297 locations->SetInAt(0, Location::RequiresRegister());
1298 locations->SetInAt(1, Location::RequiresRegister());
1299 if (instruction->HasUses()) {
1300 locations->SetOut(Location::SameAsFirstInput());
1301 }
1302}
1303
1304void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001305 LocationSummary* locations = instruction->GetLocations();
1306 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1307 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001308 codegen_->AddSlowPath(slow_path);
1309
1310 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1311 __ B(slow_path->GetEntryLabel(), hs);
1312}
1313
1314void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1315 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1316 instruction, LocationSummary::kCallOnSlowPath);
1317 locations->SetInAt(0, Location::RequiresRegister());
1318 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001319 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001320}
1321
1322void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001323 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001324 Register obj = InputRegisterAt(instruction, 0);
1325 Register cls = InputRegisterAt(instruction, 1);

Alexandre Rames3e69f162014-12-10 10:36:50 +00001326 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001327
Alexandre Rames3e69f162014-12-10 10:36:50 +00001328 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1329 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001330 codegen_->AddSlowPath(slow_path);
1331
1332 // TODO: avoid this check if we know obj is not null.
1333 __ Cbz(obj, slow_path->GetExitLabel());
1334 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001335 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1336 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001337 __ B(ne, slow_path->GetEntryLabel());
1338 __ Bind(slow_path->GetExitLabel());
1339}
1340
1341void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1342 LocationSummary* locations =
1343 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1344 locations->SetInAt(0, Location::RequiresRegister());
1345 if (check->HasUses()) {
1346 locations->SetOut(Location::SameAsFirstInput());
1347 }
1348}
1349
1350void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1351 // We assume the class is not null.
1352 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1353 check->GetLoadClass(), check, check->GetDexPc(), true);
1354 codegen_->AddSlowPath(slow_path);
1355 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1356}
1357
Serban Constantinescu02164b32014-11-13 14:05:07 +00001358void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001359 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001360 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1361 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001362 switch (in_type) {
1363 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001364 locations->SetInAt(0, Location::RequiresRegister());
1365 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1366 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1367 break;
1368 }
1369 case Primitive::kPrimFloat:
1370 case Primitive::kPrimDouble: {
1371 locations->SetInAt(0, Location::RequiresFpuRegister());
1372 locations->SetInAt(1, Location::RequiresFpuRegister());
1373 locations->SetOut(Location::RequiresRegister());
1374 break;
1375 }
1376 default:
1377 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1378 }
1379}
1380
1381void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1382 Primitive::Type in_type = compare->InputAt(0)->GetType();
1383
1384 // 0 if: left == right
1385 // 1 if: left > right
1386 // -1 if: left < right
1387 switch (in_type) {
1388 case Primitive::kPrimLong: {
1389 Register result = OutputRegister(compare);
1390 Register left = InputRegisterAt(compare, 0);
1391 Operand right = InputOperandAt(compare, 1);
1392
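      // Cset sets the result to 1 when the operands differ (0 when equal); Cneg then
      // negates it for the less-than case, producing the expected -1, 0 or 1.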
1393 __ Cmp(left, right);
1394 __ Cset(result, ne);
1395 __ Cneg(result, result, lt);
1396 break;
1397 }
1398 case Primitive::kPrimFloat:
1399 case Primitive::kPrimDouble: {
1400 Register result = OutputRegister(compare);
1401 FPRegister left = InputFPRegisterAt(compare, 0);
1402 FPRegister right = InputFPRegisterAt(compare, 1);
1403
1404 __ Fcmp(left, right);
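      // For unordered (NaN) operands `ne` is true while `mi` and `gt` are false, so
      // the Cset/Csetm + Cneg sequences leave 1 for a gt bias and -1 otherwise.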
1405 if (compare->IsGtBias()) {
1406 __ Cset(result, ne);
1407 } else {
1408 __ Csetm(result, ne);
1409 }
1410 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001411 break;
1412 }
1413 default:
1414 LOG(FATAL) << "Unimplemented compare type " << in_type;
1415 }
1416}
1417
1418void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1419 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1420 locations->SetInAt(0, Location::RequiresRegister());
1421 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1422 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001423 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001424 }
1425}
1426
1427void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1428 if (!instruction->NeedsMaterialization()) {
1429 return;
1430 }
1431
1432 LocationSummary* locations = instruction->GetLocations();
1433 Register lhs = InputRegisterAt(instruction, 0);
1434 Operand rhs = InputOperandAt(instruction, 1);
1435 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1436 Condition cond = ARM64Condition(instruction->GetCondition());
1437
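  // Materialize the condition: the result register is set to 1 when the condition
  // holds and to 0 otherwise.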
1438 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001439 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001440}
1441
1442#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1443 M(Equal) \
1444 M(NotEqual) \
1445 M(LessThan) \
1446 M(LessThanOrEqual) \
1447 M(GreaterThan) \
1448 M(GreaterThanOrEqual)
1449#define DEFINE_CONDITION_VISITORS(Name) \
1450void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1451void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1452FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001453#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001454#undef FOR_EACH_CONDITION_INSTRUCTION
1455
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001456void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1457 LocationSummary* locations =
1458 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1459 switch (div->GetResultType()) {
1460 case Primitive::kPrimInt:
1461 case Primitive::kPrimLong:
1462 locations->SetInAt(0, Location::RequiresRegister());
1463 locations->SetInAt(1, Location::RequiresRegister());
1464 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1465 break;
1466
1467 case Primitive::kPrimFloat:
1468 case Primitive::kPrimDouble:
1469 locations->SetInAt(0, Location::RequiresFpuRegister());
1470 locations->SetInAt(1, Location::RequiresFpuRegister());
1471 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1472 break;
1473
1474 default:
1475 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1476 }
1477}
1478
1479void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1480 Primitive::Type type = div->GetResultType();
1481 switch (type) {
1482 case Primitive::kPrimInt:
1483 case Primitive::kPrimLong:
1484 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1485 break;
1486
1487 case Primitive::kPrimFloat:
1488 case Primitive::kPrimDouble:
1489 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1490 break;
1491
1492 default:
1493 LOG(FATAL) << "Unexpected div type " << type;
1494 }
1495}
1496
Alexandre Rames67555f72014-11-18 10:55:16 +00001497void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1498 LocationSummary* locations =
1499 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1500 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1501 if (instruction->HasUses()) {
1502 locations->SetOut(Location::SameAsFirstInput());
1503 }
1504}
1505
1506void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1507 SlowPathCodeARM64* slow_path =
1508 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1509 codegen_->AddSlowPath(slow_path);
1510 Location value = instruction->GetLocations()->InAt(0);
1511
Alexandre Rames3e69f162014-12-10 10:36:50 +00001512 Primitive::Type type = instruction->GetType();
1513
1514 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1515 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1516 return;
1517 }
1518
Alexandre Rames67555f72014-11-18 10:55:16 +00001519 if (value.IsConstant()) {
1520 int64_t divisor = Int64ConstantFrom(value);
1521 if (divisor == 0) {
1522 __ B(slow_path->GetEntryLabel());
1523 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001524 // A division by a non-zero constant is valid. We don't need to perform
1525 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001526 }
1527 } else {
1528 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1529 }
1530}
1531
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001532void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1533 LocationSummary* locations =
1534 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1535 locations->SetOut(Location::ConstantLocation(constant));
1536}
1537
1538void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1539 UNUSED(constant);
1540 // Will be generated at use site.
1541}
1542
Alexandre Rames5319def2014-10-23 10:03:10 +01001543void LocationsBuilderARM64::VisitExit(HExit* exit) {
1544 exit->SetLocations(nullptr);
1545}
1546
1547void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001548 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001549 if (kIsDebugBuild) {
1550 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001551 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001552 }
1553}
1554
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001555void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1556 LocationSummary* locations =
1557 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1558 locations->SetOut(Location::ConstantLocation(constant));
1559}
1560
1561void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1562 UNUSED(constant);
1563 // Will be generated at use site.
1564}
1565
Alexandre Rames5319def2014-10-23 10:03:10 +01001566void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1567 got->SetLocations(nullptr);
1568}
1569
1570void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1571 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001572 DCHECK(!successor->IsExitBlock());
1573 HBasicBlock* block = got->GetBlock();
1574 HInstruction* previous = got->GetPrevious();
1575 HLoopInformation* info = block->GetLoopInformation();
1576
1577 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1578 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1579 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1580 return;
1581 }
1582 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1583 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1584 }
1585 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001586 __ B(codegen_->GetLabelOf(successor));
1587 }
1588}
1589
1590void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1591 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1592 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001593 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001594 locations->SetInAt(0, Location::RequiresRegister());
1595 }
1596}
1597
1598void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1599 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001600 HCondition* condition = cond->AsCondition();
1601 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1602 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1603
Serban Constantinescu02164b32014-11-13 14:05:07 +00001604 if (cond->IsIntConstant()) {
1605 int32_t cond_value = cond->AsIntConstant()->GetValue();
1606 if (cond_value == 1) {
1607 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1608 __ B(true_target);
1609 }
1610 return;
1611 } else {
1612 DCHECK_EQ(cond_value, 0);
1613 }
1614 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001615 // The condition instruction has been materialized, compare the output to 0.
1616 Location cond_val = if_instr->GetLocations()->InAt(0);
1617 DCHECK(cond_val.IsRegister());
1618 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001619 } else {
1620 // The condition instruction has not been materialized, use its inputs as
1621 // the comparison and its condition as the branch condition.
1622 Register lhs = InputRegisterAt(condition, 0);
1623 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001624 Condition arm64_cond = ARM64Condition(condition->GetCondition());
1625 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1626 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001627 __ Cbz(lhs, true_target);
1628 } else {
1629 __ Cbnz(lhs, true_target);
1630 }
1631 } else {
1632 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001633 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001634 }
1635 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001636 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1637 __ B(false_target);
1638 }
1639}
1640
1641void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001642 LocationSummary* locations =
1643 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001644 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001645 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001646}
1647
1648void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001649 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001650
1651 if (instruction->IsVolatile()) {
1652 if (kUseAcquireRelease) {
Calin Juravle77520bc2015-01-12 18:45:46 +00001653 // NB: LoadAcquire will record the pc info if needed.
1654 codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001655 } else {
1656 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
Calin Juravle77520bc2015-01-12 18:45:46 +00001657 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001658 // For IRIW sequential consistency kLoadAny is not sufficient.
1659 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1660 }
1661 } else {
1662 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
Calin Juravle77520bc2015-01-12 18:45:46 +00001663 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001664 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001665}
1666
1667void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001668 LocationSummary* locations =
1669 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001670 locations->SetInAt(0, Location::RequiresRegister());
1671 locations->SetInAt(1, Location::RequiresRegister());
1672}
1673
1674void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001675 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001676 CPURegister value = InputCPURegisterAt(instruction, 1);
1677 Offset offset = instruction->GetFieldOffset();
1678 Primitive::Type field_type = instruction->GetFieldType();
1679
1680 if (instruction->IsVolatile()) {
1681 if (kUseAcquireRelease) {
1682 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001683 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001684 } else {
1685 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1686 codegen_->Store(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001687 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001688 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1689 }
1690 } else {
1691 codegen_->Store(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001692 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001693 }
1694
1695 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001696 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001697 }
1698}
1699
Alexandre Rames67555f72014-11-18 10:55:16 +00001700void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1701 LocationSummary::CallKind call_kind =
1702 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1703 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1704 locations->SetInAt(0, Location::RequiresRegister());
1705 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001706 // The output overlaps its inputs.
1707 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00001708}
1709
1710void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1711 LocationSummary* locations = instruction->GetLocations();
1712 Register obj = InputRegisterAt(instruction, 0);
1713 Register cls = InputRegisterAt(instruction, 1);
1714 Register out = OutputRegister(instruction);
1715
1716 vixl::Label done;
1717
1718 // Return 0 if `obj` is null.
1719 // TODO: Avoid this check if we know `obj` is not null.
1720 __ Mov(out, 0);
1721 __ Cbz(obj, &done);
1722
1723 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001724 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001725 __ Cmp(out, cls);
1726 if (instruction->IsClassFinal()) {
1727 // Classes must be equal for the instanceof to succeed.
1728 __ Cset(out, eq);
1729 } else {
1730 // If the classes are not equal, we go into a slow path.
1731 DCHECK(locations->OnlyCallsOnSlowPath());
1732 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001733 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1734 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001735 codegen_->AddSlowPath(slow_path);
1736 __ B(ne, slow_path->GetEntryLabel());
1737 __ Mov(out, 1);
1738 __ Bind(slow_path->GetExitLabel());
1739 }
1740
1741 __ Bind(&done);
1742}
1743
Alexandre Rames5319def2014-10-23 10:03:10 +01001744void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1745 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1746 locations->SetOut(Location::ConstantLocation(constant));
1747}
1748
1749void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1750 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001751 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001752}
1753
Alexandre Rames5319def2014-10-23 10:03:10 +01001754void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1755 LocationSummary* locations =
1756 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1757 locations->AddTemp(LocationFrom(x0));
1758
1759 InvokeDexCallingConventionVisitor calling_convention_visitor;
1760 for (size_t i = 0; i < invoke->InputCount(); i++) {
1761 HInstruction* input = invoke->InputAt(i);
1762 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1763 }
1764
1765 Primitive::Type return_type = invoke->GetType();
1766 if (return_type != Primitive::kPrimVoid) {
1767 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1768 }
1769}
1770
Alexandre Rames67555f72014-11-18 10:55:16 +00001771void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1772 HandleInvoke(invoke);
1773}
1774
1775void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1776 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1777 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1778 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1779 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1780 Location receiver = invoke->GetLocations()->InAt(0);
1781 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001782 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001783
1784 // The register ip1 is required to be used for the hidden argument in
1785 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1786 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1787 scratch_scope.Exclude(ip1);
1788 __ Mov(ip1, invoke->GetDexMethodIndex());
1789
1790 // temp = object->GetClass();
1791 if (receiver.IsStackSlot()) {
1792 __ Ldr(temp, StackOperandFrom(receiver));
1793 __ Ldr(temp, HeapOperand(temp, class_offset));
1794 } else {
1795 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1796 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001797 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00001798 // temp = temp->GetImtEntryAt(method_offset);
1799 __ Ldr(temp, HeapOperand(temp, method_offset));
1800 // lr = temp->GetEntryPoint();
1801 __ Ldr(lr, HeapOperand(temp, entry_point));
1802 // lr();
1803 __ Blr(lr);
1804 DCHECK(!codegen_->IsLeafMethod());
1805 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1806}
1807
1808void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001809 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1810 if (intrinsic.TryDispatch(invoke)) {
1811 return;
1812 }
1813
Alexandre Rames67555f72014-11-18 10:55:16 +00001814 HandleInvoke(invoke);
1815}
1816
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001817void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001818 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1819 if (intrinsic.TryDispatch(invoke)) {
1820 return;
1821 }
1822
Alexandre Rames67555f72014-11-18 10:55:16 +00001823 HandleInvoke(invoke);
1824}
1825
Andreas Gampe878d58c2015-01-15 23:24:00 -08001826static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
1827 if (invoke->GetLocations()->Intrinsified()) {
1828 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
1829 intrinsic.Dispatch(invoke);
1830 return true;
1831 }
1832 return false;
1833}
1834
1835void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
1836 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
1837 DCHECK(temp.Is(kArtMethodRegister));
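  // Byte offset of the target ArtMethod* within dex_cache_resolved_methods_: the
  // array data offset plus the method index scaled by the heap reference size.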
Alexandre Rames5319def2014-10-23 10:03:10 +01001838 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08001839 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01001840
1841 // TODO: Implement all kinds of calls:
1842 // 1) boot -> boot
1843 // 2) app -> boot
1844 // 3) app -> app
1845 //
1846 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1847
1848 // temp = method;
Andreas Gampe878d58c2015-01-15 23:24:00 -08001849 LoadCurrentMethod(temp);
Nicolas Geoffray4e44c822014-12-17 12:25:12 +00001850 // temp = temp->dex_cache_resolved_methods_;
1851 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1852 // temp = temp[index_in_cache];
1853 __ Ldr(temp, HeapOperand(temp, index_in_cache));
Alexandre Rames5319def2014-10-23 10:03:10 +01001854 // lr = temp->entry_point_from_quick_compiled_code_;
Serban Constantinescu02164b32014-11-13 14:05:07 +00001855 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe878d58c2015-01-15 23:24:00 -08001856 kArm64WordSize)));
Alexandre Rames5319def2014-10-23 10:03:10 +01001857 // lr();
1858 __ Blr(lr);
1859
Andreas Gampe878d58c2015-01-15 23:24:00 -08001860 RecordPcInfo(invoke, invoke->GetDexPc());
1861 DCHECK(!IsLeafMethod());
1862}
1863
1864void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1865 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1866 return;
1867 }
1868
1869 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1870 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Alexandre Rames5319def2014-10-23 10:03:10 +01001871}
1872
1873void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001874 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1875 return;
1876 }
1877
Alexandre Rames5319def2014-10-23 10:03:10 +01001878 LocationSummary* locations = invoke->GetLocations();
1879 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001880 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01001881 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
1882 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1883 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001884 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01001885
1886 // temp = object->GetClass();
1887 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001888 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
1889 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001890 } else {
1891 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00001892 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001893 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001894 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01001895 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001896 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001897 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001898 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001899 // lr();
1900 __ Blr(lr);
1901 DCHECK(!codegen_->IsLeafMethod());
1902 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1903}
1904
Alexandre Rames67555f72014-11-18 10:55:16 +00001905void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
1906 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
1907 : LocationSummary::kNoCall;
1908 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
1909 locations->SetOut(Location::RequiresRegister());
1910}
1911
1912void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
1913 Register out = OutputRegister(cls);
1914 if (cls->IsReferrersClass()) {
1915 DCHECK(!cls->CanCallRuntime());
1916 DCHECK(!cls->MustGenerateClinitCheck());
1917 codegen_->LoadCurrentMethod(out);
1918 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1919 } else {
1920 DCHECK(cls->CanCallRuntime());
1921 codegen_->LoadCurrentMethod(out);
1922 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001923 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00001924
1925 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1926 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
1927 codegen_->AddSlowPath(slow_path);
1928 __ Cbz(out, slow_path->GetEntryLabel());
1929 if (cls->MustGenerateClinitCheck()) {
1930 GenerateClassInitializationCheck(slow_path, out);
1931 } else {
1932 __ Bind(slow_path->GetExitLabel());
1933 }
1934 }
1935}
1936
1937void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
1938 LocationSummary* locations =
1939 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
1940 locations->SetOut(Location::RequiresRegister());
1941}
1942
1943void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
1944 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
1945 __ Ldr(OutputRegister(instruction), exception);
1946 __ Str(wzr, exception);
1947}
1948
Alexandre Rames5319def2014-10-23 10:03:10 +01001949void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
1950 load->SetLocations(nullptr);
1951}
1952
1953void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
1954 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001955 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01001956}
1957
Alexandre Rames67555f72014-11-18 10:55:16 +00001958void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
1959 LocationSummary* locations =
1960 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
1961 locations->SetOut(Location::RequiresRegister());
1962}
1963
1964void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
1965 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
1966 codegen_->AddSlowPath(slow_path);
1967
1968 Register out = OutputRegister(load);
1969 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08001970 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1971 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001972 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00001973 __ Cbz(out, slow_path->GetEntryLabel());
1974 __ Bind(slow_path->GetExitLabel());
1975}
1976
Alexandre Rames5319def2014-10-23 10:03:10 +01001977void LocationsBuilderARM64::VisitLocal(HLocal* local) {
1978 local->SetLocations(nullptr);
1979}
1980
1981void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
1982 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1983}
1984
1985void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
1986 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1987 locations->SetOut(Location::ConstantLocation(constant));
1988}
1989
1990void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
1991 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001992 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001993}
1994
Alexandre Rames67555f72014-11-18 10:55:16 +00001995void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
1996 LocationSummary* locations =
1997 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1998 InvokeRuntimeCallingConvention calling_convention;
1999 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2000}
2001
2002void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2003 codegen_->InvokeRuntime(instruction->IsEnter()
2004 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2005 instruction,
2006 instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002007 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002008}
2009
Alexandre Rames42d641b2014-10-27 14:00:51 +00002010void LocationsBuilderARM64::VisitMul(HMul* mul) {
2011 LocationSummary* locations =
2012 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2013 switch (mul->GetResultType()) {
2014 case Primitive::kPrimInt:
2015 case Primitive::kPrimLong:
2016 locations->SetInAt(0, Location::RequiresRegister());
2017 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002018 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002019 break;
2020
2021 case Primitive::kPrimFloat:
2022 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002023 locations->SetInAt(0, Location::RequiresFpuRegister());
2024 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002025 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002026 break;
2027
2028 default:
2029 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2030 }
2031}
2032
2033void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2034 switch (mul->GetResultType()) {
2035 case Primitive::kPrimInt:
2036 case Primitive::kPrimLong:
2037 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2038 break;
2039
2040 case Primitive::kPrimFloat:
2041 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002042 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002043 break;
2044
2045 default:
2046 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2047 }
2048}
2049
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002050void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2051 LocationSummary* locations =
2052 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2053 switch (neg->GetResultType()) {
2054 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002055 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002056 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002057 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002058 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002059
2060 case Primitive::kPrimFloat:
2061 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002062 locations->SetInAt(0, Location::RequiresFpuRegister());
2063 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002064 break;
2065
2066 default:
2067 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2068 }
2069}
2070
2071void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2072 switch (neg->GetResultType()) {
2073 case Primitive::kPrimInt:
2074 case Primitive::kPrimLong:
2075 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2076 break;
2077
2078 case Primitive::kPrimFloat:
2079 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002080 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002081 break;
2082
2083 default:
2084 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2085 }
2086}
2087
2088void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2089 LocationSummary* locations =
2090 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2091 InvokeRuntimeCallingConvention calling_convention;
2092 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002093 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002094 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002095 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2096 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2097 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002098}
2099
2100void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2101 LocationSummary* locations = instruction->GetLocations();
2102 InvokeRuntimeCallingConvention calling_convention;
2103 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2104 DCHECK(type_index.Is(w0));
2105 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002106 DCHECK(current_method.Is(w2));
Alexandre Rames67555f72014-11-18 10:55:16 +00002107 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002108 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002109 codegen_->InvokeRuntime(
2110 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002111 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2112 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002113}
2114
Alexandre Rames5319def2014-10-23 10:03:10 +01002115void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2116 LocationSummary* locations =
2117 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2118 InvokeRuntimeCallingConvention calling_convention;
2119 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2120 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2121 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002122 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002123}
2124
2125void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2126 LocationSummary* locations = instruction->GetLocations();
2127 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2128 DCHECK(type_index.Is(w0));
2129 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2130 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002131 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002132 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002133 codegen_->InvokeRuntime(
2134 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002135 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002136}
2137
2138void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2139 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002140 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002141 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002142}
2143
2144void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2145 switch (instruction->InputAt(0)->GetType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002146 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002147 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002148 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002149 break;
2150
2151 default:
2152 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2153 }
2154}
2155
2156void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2157 LocationSummary* locations =
2158 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2159 locations->SetInAt(0, Location::RequiresRegister());
2160 if (instruction->HasUses()) {
2161 locations->SetOut(Location::SameAsFirstInput());
2162 }
2163}
2164
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002165void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00002166 if (codegen_->CanMoveNullCheckToUser(instruction)) {
2167 return;
2168 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002169 Location obj = instruction->GetLocations()->InAt(0);
2170
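  // Load from the object into the zero register: if the object is null the access
  // faults and the fault handler raises the NullPointerException.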
2171 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
2172 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2173}
2174
2175void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002176 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2177 codegen_->AddSlowPath(slow_path);
2178
2179 LocationSummary* locations = instruction->GetLocations();
2180 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00002181
2182 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01002183}
2184
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002185void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2186 if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2187 GenerateImplicitNullCheck(instruction);
2188 } else {
2189 GenerateExplicitNullCheck(instruction);
2190 }
2191}
2192
Alexandre Rames67555f72014-11-18 10:55:16 +00002193void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2194 HandleBinaryOp(instruction);
2195}
2196
2197void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2198 HandleBinaryOp(instruction);
2199}
2200
Alexandre Rames3e69f162014-12-10 10:36:50 +00002201void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2202 LOG(FATAL) << "Unreachable";
2203}
2204
2205void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2206 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2207}
2208
Alexandre Rames5319def2014-10-23 10:03:10 +01002209void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2210 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2211 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2212 if (location.IsStackSlot()) {
2213 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2214 } else if (location.IsDoubleStackSlot()) {
2215 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2216 }
2217 locations->SetOut(location);
2218}
2219
2220void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2221 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002222 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002223}
2224
2225void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2226 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2227 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2228 locations->SetInAt(i, Location::Any());
2229 }
2230 locations->SetOut(Location::Any());
2231}
2232
2233void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002234 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002235 LOG(FATAL) << "Unreachable";
2236}
2237
Serban Constantinescu02164b32014-11-13 14:05:07 +00002238void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002239 Primitive::Type type = rem->GetResultType();
2240 LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
2241 : LocationSummary::kNoCall;
2242 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2243
2244 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002245 case Primitive::kPrimInt:
2246 case Primitive::kPrimLong:
2247 locations->SetInAt(0, Location::RequiresRegister());
2248 locations->SetInAt(1, Location::RequiresRegister());
2249 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2250 break;
2251
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002252 case Primitive::kPrimFloat:
2253 case Primitive::kPrimDouble: {
2254 InvokeRuntimeCallingConvention calling_convention;
2255 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2256 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2257 locations->SetOut(calling_convention.GetReturnLocation(type));
2258
2259 break;
2260 }
2261
Serban Constantinescu02164b32014-11-13 14:05:07 +00002262 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002263 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00002264 }
2265}
2266
2267void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2268 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002269
Serban Constantinescu02164b32014-11-13 14:05:07 +00002270 switch (type) {
2271 case Primitive::kPrimInt:
2272 case Primitive::kPrimLong: {
2273 UseScratchRegisterScope temps(GetVIXLAssembler());
2274 Register dividend = InputRegisterAt(rem, 0);
2275 Register divisor = InputRegisterAt(rem, 1);
2276 Register output = OutputRegister(rem);
2277 Register temp = temps.AcquireSameSizeAs(output);
2278
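      // Compute the remainder as dividend - (dividend / divisor) * divisor.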
2279 __ Sdiv(temp, dividend, divisor);
2280 __ Msub(output, temp, divisor, dividend);
2281 break;
2282 }
2283
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002284 case Primitive::kPrimFloat:
2285 case Primitive::kPrimDouble: {
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
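      // Without a store-release instruction, bracket the store with explicit
      // barriers: kAnyStore orders prior accesses before the volatile store,
      // and kAnyAny keeps later accesses from being reordered above it.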
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

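  // Reference stores need a write barrier: mark the GC card covering the
  // holder object so the collector will re-scan it.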
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
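    // Java's char is the only unsigned integral type, so conversions to or from
    // char zero-extend (Ubfx); all other integral conversions sign-extend (Sbfx).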
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art