/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::Is64BitType;
using helpers::IsFPType;
using helpers::IsIntegralType;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

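// Convert an ART HIR condition into the matching VIXL ARM64 condition code.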
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

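// Slow path for HBoundsCheck: moves the failing index and the array length into
// the runtime calling convention registers, then calls pThrowArrayBounds.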
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

#undef __

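// Pick the location of the next managed argument: an FPU register while any
// remain for FP types, a core register while any remain for core types, and a
// stack slot otherwise. Stack space is reserved for every argument either way.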
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    (1 << LR),
                    0,
                    compiler_options),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(LR));
}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->SwapLocations(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::RestoreScratch(int reg) {
  __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void ParallelMoveResolverARM64::SpillScratch(int reg) {
  __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  int frame_size = GetFrameSize();
  __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(GetFramePreservedRegisters(), frame_size - FrameEntrySpillSize());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  __ PeekCPURegList(GetFramePreservedRegisters(), frame_size - FrameEntrySpillSize());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

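// Card-table write barrier: if `value` is non-null, mark the card covering
// `object` so the GC will revisit the reference stored into it.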
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline ATTRIBUTE_UNUSED) const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   tr
  //   lr
  //   sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant() || constant->IsLongConstant()) {
    __ Mov(Register(destination),
           constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                     : constant->AsLongConstant()->GetValue());
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

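// Move a value between two arbitrary locations (core/FP register, stack slot or
// constant). When `type` is kPrimVoid, the operand size is inferred from the
// source and destination locations.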
void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

657 // A valid move can always be inferred from the destination and source
658 // locations. When moving from and to a register, the argument type can be
659 // used to generate 32bit instead of 64bit moves. In debug mode we also
660 // checks the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
           (destination.IsRegister() && !IsFPType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }

  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
             (source.IsFpuRegister() == IsFPType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

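// Swap the contents of two non-constant locations via scratch registers; used
// by the parallel move resolver.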
void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    Register r1 = XRegisterFrom(loc1);
    Register r2 = XRegisterFrom(loc2);
    Register tmp = temps.AcquireSameSizeAs(r1);
    __ Mov(tmp, r2);
    __ Mov(r2, r1);
    __ Mov(r1, tmp);
  } else if (is_fp_reg2 && is_fp_reg1) {
    FPRegister r1 = DRegisterFrom(loc1);
    FPRegister r2 = DRegisterFrom(loc2);
    FPRegister tmp = temps.AcquireSameSizeAs(r1);
    __ Fmov(tmp, r2);
    __ Fmov(r2, r1);
    __ Fmov(r1, tmp);
  } else if (is_slot1 != is_slot2) {
    MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
    Location reg_loc = is_slot1 ? loc2 : loc1;
    CPURegister reg, tmp;
    if (reg_loc.IsFpuRegister()) {
      reg = DRegisterFrom(reg_loc);
      tmp = temps.AcquireD();
    } else {
      reg = XRegisterFrom(reg_loc);
      tmp = temps.AcquireX();
    }
    __ Ldr(tmp, mem);
    __ Str(reg, mem);
    if (reg_loc.IsFpuRegister()) {
      __ Fmov(FPRegister(reg), FPRegister(tmp));
    } else {
      __ Mov(Register(reg), Register(tmp));
    }
  } else if (is_slot1 && is_slot2) {
    MemOperand mem1 = StackOperandFrom(loc1);
    MemOperand mem2 = StackOperandFrom(loc2);
    Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
    Register tmp2 = temps.AcquireSameSizeAs(tmp1);
    __ Ldr(tmp1, mem1);
    __ Ldr(tmp2, mem2);
    __ Str(tmp1, mem2);
    __ Str(tmp2, mem1);
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

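// Emit a plain load of `type` from `src` into `dst`, using the correctly sized
// (and, for byte and short, sign-extending) load instruction.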
void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

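// Emit a load with acquire semantics (Ldarb/Ldarh/Ldar). The acquire forms only
// accept a base register, so the full address is first materialized into a
// temporary.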
void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

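// Emit a store with release semantics (Stlrb/Stlrh/Stlr). As in LoadAcquire,
// the address is first materialized into a temporary base register.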
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc);
    DCHECK(instruction->IsSuspendCheck()
        || instruction->IsBoundsCheck()
        || instruction->IsNullCheck()
        || instruction->IsDivZeroCheck()
        || !IsLeafMethod());
  }
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  if (kUseAcquireRelease) {
    // TODO(vixl): Let the MacroAssembler handle MemOperand.
    __ Add(temp, class_reg, status_offset);
    __ Ldar(temp, HeapOperand(temp));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
  } else {
    __ Ldr(temp, HeapOperand(class_reg, status_offset));
    __ Cmp(temp, mirror::Class::kStatusInitialized);
    __ B(lt, slow_path->GetEntryLabel());
    __ Dmb(InnerShareable, BarrierReads);
  }
  __ Bind(slow_path->GetExitLabel());
}

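// Lower an HIR memory barrier kind to a DMB on the inner shareable domain.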
void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

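// Load the thread flags and branch to the suspend-check slow path if any flag
// is set.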
Serban Constantinescu02164b32014-11-13 14:05:07 +00001015void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1016 HBasicBlock* successor) {
1017 SuspendCheckSlowPathARM64* slow_path =
1018 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1019 codegen_->AddSlowPath(slow_path);
1020 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1021 Register temp = temps.AcquireW();
1022
1023 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1024 if (successor == nullptr) {
1025 __ Cbnz(temp, slow_path->GetEntryLabel());
1026 __ Bind(slow_path->GetReturnLabel());
1027 } else {
1028 __ Cbz(temp, codegen_->GetLabelOf(successor));
1029 __ B(slow_path->GetEntryLabel());
1030 // slow_path will return to GetLabelOf(successor).
1031 }
1032}
1033
Alexandre Rames5319def2014-10-23 10:03:10 +01001034InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1035 CodeGeneratorARM64* codegen)
1036 : HGraphVisitor(graph),
1037 assembler_(codegen->GetAssembler()),
1038 codegen_(codegen) {}
1039
1040#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001041 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001042
1043#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1044
1045enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001046 // Using a base helps identify when we hit such breakpoints.
1047 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001048#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1049 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1050#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1051};
1052
1053#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
1054 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001055 UNUSED(instr); \
Alexandre Rames5319def2014-10-23 10:03:10 +01001056 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1057 } \
1058 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1059 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1060 locations->SetOut(Location::Any()); \
1061 }
1062 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1063#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1064
1065#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001066#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001067
Alexandre Rames67555f72014-11-18 10:55:16 +00001068void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001069 DCHECK_EQ(instr->InputCount(), 2U);
1070 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1071 Primitive::Type type = instr->GetResultType();
1072 switch (type) {
1073 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001074 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001075 locations->SetInAt(0, Location::RequiresRegister());
1076 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001077 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001078 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001079
1080 case Primitive::kPrimFloat:
1081 case Primitive::kPrimDouble:
1082 locations->SetInAt(0, Location::RequiresFpuRegister());
1083 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001084 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001085 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001086
Alexandre Rames5319def2014-10-23 10:03:10 +01001087 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001088 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001089 }
1090}
1091
Alexandre Rames67555f72014-11-18 10:55:16 +00001092void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001093 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001094
1095 switch (type) {
1096 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001097 case Primitive::kPrimLong: {
1098 Register dst = OutputRegister(instr);
1099 Register lhs = InputRegisterAt(instr, 0);
1100 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001101 if (instr->IsAdd()) {
1102 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001103 } else if (instr->IsAnd()) {
1104 __ And(dst, lhs, rhs);
1105 } else if (instr->IsOr()) {
1106 __ Orr(dst, lhs, rhs);
1107 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001108 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001109 } else {
1110 DCHECK(instr->IsXor());
1111 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001112 }
1113 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001114 }
1115 case Primitive::kPrimFloat:
1116 case Primitive::kPrimDouble: {
1117 FPRegister dst = OutputFPRegister(instr);
1118 FPRegister lhs = InputFPRegisterAt(instr, 0);
1119 FPRegister rhs = InputFPRegisterAt(instr, 1);
1120 if (instr->IsAdd()) {
1121 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001122 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001123 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001124 } else {
1125 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001126 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001127 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001128 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001129 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001130 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001131 }
1132}
1133
Serban Constantinescu02164b32014-11-13 14:05:07 +00001134void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1135 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1136
1137 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1138 Primitive::Type type = instr->GetResultType();
1139 switch (type) {
1140 case Primitive::kPrimInt:
1141 case Primitive::kPrimLong: {
1142 locations->SetInAt(0, Location::RequiresRegister());
1143 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1144 locations->SetOut(Location::RequiresRegister());
1145 break;
1146 }
1147 default:
1148 LOG(FATAL) << "Unexpected shift type " << type;
1149 }
1150}
1151
1152void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1153 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1154
1155 Primitive::Type type = instr->GetType();
1156 switch (type) {
1157 case Primitive::kPrimInt:
1158 case Primitive::kPrimLong: {
1159 Register dst = OutputRegister(instr);
1160 Register lhs = InputRegisterAt(instr, 0);
1161 Operand rhs = InputOperandAt(instr, 1);
1162 if (rhs.IsImmediate()) {
1163 uint32_t shift_value = (type == Primitive::kPrimInt)
1164 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1165 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1166 if (instr->IsShl()) {
1167 __ Lsl(dst, lhs, shift_value);
1168 } else if (instr->IsShr()) {
1169 __ Asr(dst, lhs, shift_value);
1170 } else {
1171 __ Lsr(dst, lhs, shift_value);
1172 }
1173 } else {
1174 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1175
1176 if (instr->IsShl()) {
1177 __ Lsl(dst, lhs, rhs_reg);
1178 } else if (instr->IsShr()) {
1179 __ Asr(dst, lhs, rhs_reg);
1180 } else {
1181 __ Lsr(dst, lhs, rhs_reg);
1182 }
1183 }
1184 break;
1185 }
1186 default:
1187 LOG(FATAL) << "Unexpected shift operation type " << type;
1188 }
1189}
1190
Alexandre Rames5319def2014-10-23 10:03:10 +01001191void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001192 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001193}
1194
1195void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001196 HandleBinaryOp(instruction);
1197}
1198
1199void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1200 HandleBinaryOp(instruction);
1201}
1202
1203void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1204 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001205}
1206
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001207void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1208 LocationSummary* locations =
1209 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1210 locations->SetInAt(0, Location::RequiresRegister());
1211 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1212 locations->SetOut(Location::RequiresRegister());
1213}
1214
1215void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1216 LocationSummary* locations = instruction->GetLocations();
1217 Primitive::Type type = instruction->GetType();
1218 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001219 Location index = locations->InAt(1);
1220 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001221 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001222 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001223
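  // A constant index folds into the immediate offset; a variable index needs a
  // scratch register to form base + (index << component size shift).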
1224 if (index.IsConstant()) {
1225 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001226 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001227 } else {
1228 Register temp = temps.AcquireSameSizeAs(obj);
1229 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1230 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001231 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001232 }
1233
Alexandre Rames67555f72014-11-18 10:55:16 +00001234 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001235 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001236}
1237
Alexandre Rames5319def2014-10-23 10:03:10 +01001238void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1239 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1240 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001241 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001242}
1243
1244void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1245 __ Ldr(OutputRegister(instruction),
1246 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001247 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001248}
1249
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001250void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1251 Primitive::Type value_type = instruction->GetComponentType();
1252 bool is_object = value_type == Primitive::kPrimNot;
1253 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1254 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1255 if (is_object) {
1256 InvokeRuntimeCallingConvention calling_convention;
1257 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1258 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1259 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1260 } else {
1261 locations->SetInAt(0, Location::RequiresRegister());
1262 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1263 locations->SetInAt(2, Location::RequiresRegister());
1264 }
1265}
1266
1267void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1268 Primitive::Type value_type = instruction->GetComponentType();
1269 if (value_type == Primitive::kPrimNot) {
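    // Reference stores are handed to the runtime entry point, which also takes
    // care of the required type check and write barrier.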
Alexandre Rames67555f72014-11-18 10:55:16 +00001270 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001271 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001272 } else {
1273 LocationSummary* locations = instruction->GetLocations();
1274 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001275 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001276 Location index = locations->InAt(1);
1277 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001278 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001279 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001280
1281 if (index.IsConstant()) {
1282 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001283 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001284 } else {
1285 Register temp = temps.AcquireSameSizeAs(obj);
1286 Register index_reg = InputRegisterAt(instruction, 1);
1287 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001288 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001289 }
1290
1291 codegen_->Store(value_type, value, destination);
Calin Juravle77520bc2015-01-12 18:45:46 +00001292 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001293 }
1294}
1295
Alexandre Rames67555f72014-11-18 10:55:16 +00001296void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1297 LocationSummary* locations =
1298 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1299 locations->SetInAt(0, Location::RequiresRegister());
1300 locations->SetInAt(1, Location::RequiresRegister());
1301 if (instruction->HasUses()) {
1302 locations->SetOut(Location::SameAsFirstInput());
1303 }
1304}
1305
1306void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001307 LocationSummary* locations = instruction->GetLocations();
1308 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1309 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001310 codegen_->AddSlowPath(slow_path);
1311
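  // The branch uses an unsigned condition (`hs`): a negative index wraps to a
  // large unsigned value, so one branch covers both out-of-range cases.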
1312 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1313 __ B(slow_path->GetEntryLabel(), hs);
1314}
1315
1316void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1317 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1318 instruction, LocationSummary::kCallOnSlowPath);
1319 locations->SetInAt(0, Location::RequiresRegister());
1320 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001321 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001322}
1323
1324void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001325 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001326 Register obj = InputRegisterAt(instruction, 0);
1327 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001328 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001329
Alexandre Rames3e69f162014-12-10 10:36:50 +00001330 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1331 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001332 codegen_->AddSlowPath(slow_path);
1333
1334 // TODO: avoid this check if we know obj is not null.
1335 __ Cbz(obj, slow_path->GetExitLabel());
1336 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001337 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1338 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001339 __ B(ne, slow_path->GetEntryLabel());
1340 __ Bind(slow_path->GetExitLabel());
1341}
1342
1343void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1344 LocationSummary* locations =
1345 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1346 locations->SetInAt(0, Location::RequiresRegister());
1347 if (check->HasUses()) {
1348 locations->SetOut(Location::SameAsFirstInput());
1349 }
1350}
1351
1352void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1353 // We assume the class is not null.
1354 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1355 check->GetLoadClass(), check, check->GetDexPc(), true);
1356 codegen_->AddSlowPath(slow_path);
1357 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1358}
1359
Serban Constantinescu02164b32014-11-13 14:05:07 +00001360void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001361 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001362 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1363 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001364 switch (in_type) {
1365 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001366 locations->SetInAt(0, Location::RequiresRegister());
1367 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1368 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1369 break;
1370 }
1371 case Primitive::kPrimFloat:
1372 case Primitive::kPrimDouble: {
1373 locations->SetInAt(0, Location::RequiresFpuRegister());
1374 locations->SetInAt(1, Location::RequiresFpuRegister());
1375 locations->SetOut(Location::RequiresRegister());
1376 break;
1377 }
1378 default:
1379 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1380 }
1381}
1382
1383void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1384 Primitive::Type in_type = compare->InputAt(0)->GetType();
1385
1386 // 0 if: left == right
1387 // 1 if: left > right
1388 // -1 if: left < right
1389 switch (in_type) {
1390 case Primitive::kPrimLong: {
1391 Register result = OutputRegister(compare);
1392 Register left = InputRegisterAt(compare, 0);
1393 Operand right = InputOperandAt(compare, 1);
1394
1395 __ Cmp(left, right);
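      // Cset sets the result to 1 when the operands differ; Cneg then negates
      // it on less-than, yielding -1, 0 or +1.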
1396 __ Cset(result, ne);
1397 __ Cneg(result, result, lt);
1398 break;
1399 }
1400 case Primitive::kPrimFloat:
1401 case Primitive::kPrimDouble: {
1402 Register result = OutputRegister(compare);
1403 FPRegister left = InputFPRegisterAt(compare, 0);
1404 FPRegister right = InputFPRegisterAt(compare, 1);
1405
1406 __ Fcmp(left, right);
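      // With a gt bias, start from +1 for any non-equal result (including the
      // unordered NaN case) and flip to -1 only on an ordered less-than (mi).
      // With an lt bias, start from -1 (Csetm) and flip to +1 only on an
      // ordered greater-than (gt), so NaN comparisons yield -1.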
1407 if (compare->IsGtBias()) {
1408 __ Cset(result, ne);
1409 } else {
1410 __ Csetm(result, ne);
1411 }
1412 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001413 break;
1414 }
1415 default:
1416 LOG(FATAL) << "Unimplemented compare type " << in_type;
1417 }
1418}
1419
1420void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1421 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1422 locations->SetInAt(0, Location::RequiresRegister());
1423 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1424 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001425 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001426 }
1427}
1428
1429void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1430 if (!instruction->NeedsMaterialization()) {
1431 return;
1432 }
1433
1434 LocationSummary* locations = instruction->GetLocations();
1435 Register lhs = InputRegisterAt(instruction, 0);
1436 Operand rhs = InputOperandAt(instruction, 1);
1437 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1438 Condition cond = ARM64Condition(instruction->GetCondition());
1439
1440 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001441 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001442}
1443
1444#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1445 M(Equal) \
1446 M(NotEqual) \
1447 M(LessThan) \
1448 M(LessThanOrEqual) \
1449 M(GreaterThan) \
1450 M(GreaterThanOrEqual)
1451#define DEFINE_CONDITION_VISITORS(Name) \
1452void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1453void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1454FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001455#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001456#undef FOR_EACH_CONDITION_INSTRUCTION
1457
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001458void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1459 LocationSummary* locations =
1460 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1461 switch (div->GetResultType()) {
1462 case Primitive::kPrimInt:
1463 case Primitive::kPrimLong:
1464 locations->SetInAt(0, Location::RequiresRegister());
1465 locations->SetInAt(1, Location::RequiresRegister());
1466 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1467 break;
1468
1469 case Primitive::kPrimFloat:
1470 case Primitive::kPrimDouble:
1471 locations->SetInAt(0, Location::RequiresFpuRegister());
1472 locations->SetInAt(1, Location::RequiresFpuRegister());
1473 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1474 break;
1475
1476 default:
1477 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1478 }
1479}
1480
1481void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1482 Primitive::Type type = div->GetResultType();
1483 switch (type) {
1484 case Primitive::kPrimInt:
1485 case Primitive::kPrimLong:
1486 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1487 break;
1488
1489 case Primitive::kPrimFloat:
1490 case Primitive::kPrimDouble:
1491 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1492 break;
1493
1494 default:
1495 LOG(FATAL) << "Unexpected div type " << type;
1496 }
1497}
1498
Alexandre Rames67555f72014-11-18 10:55:16 +00001499void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1500 LocationSummary* locations =
1501 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1502 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1503 if (instruction->HasUses()) {
1504 locations->SetOut(Location::SameAsFirstInput());
1505 }
1506}
1507
1508void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1509 SlowPathCodeARM64* slow_path =
1510 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1511 codegen_->AddSlowPath(slow_path);
1512 Location value = instruction->GetLocations()->InAt(0);
1513
Alexandre Rames3e69f162014-12-10 10:36:50 +00001514 Primitive::Type type = instruction->GetType();
1515
1516 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1517 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1518 return;
1519 }
1520
Alexandre Rames67555f72014-11-18 10:55:16 +00001521 if (value.IsConstant()) {
1522 int64_t divisor = Int64ConstantFrom(value);
1523 if (divisor == 0) {
1524 __ B(slow_path->GetEntryLabel());
1525 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001526 // A division by a non-zero constant is valid. We don't need to perform
1527 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001528 }
1529 } else {
1530 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1531 }
1532}
1533
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001534void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1535 LocationSummary* locations =
1536 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1537 locations->SetOut(Location::ConstantLocation(constant));
1538}
1539
1540void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1541 UNUSED(constant);
1542 // Will be generated at use site.
1543}
1544
Alexandre Rames5319def2014-10-23 10:03:10 +01001545void LocationsBuilderARM64::VisitExit(HExit* exit) {
1546 exit->SetLocations(nullptr);
1547}
1548
1549void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001550 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001551 if (kIsDebugBuild) {
1552 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001553 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001554 }
1555}
1556
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001557void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1558 LocationSummary* locations =
1559 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1560 locations->SetOut(Location::ConstantLocation(constant));
1561}
1562
1563void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1564 UNUSED(constant);
1565 // Will be generated at use site.
1566}
1567
Alexandre Rames5319def2014-10-23 10:03:10 +01001568void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1569 got->SetLocations(nullptr);
1570}
1571
1572void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1573 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001574 DCHECK(!successor->IsExitBlock());
1575 HBasicBlock* block = got->GetBlock();
1576 HInstruction* previous = got->GetPrevious();
1577 HLoopInformation* info = block->GetLoopInformation();
1578
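  // Back edges with a pending suspend check emit the suspend check (which also
  // handles the branch to the successor) instead of the plain branch below.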
1579 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1580 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1581 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1582 return;
1583 }
1584 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1585 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1586 }
1587 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001588 __ B(codegen_->GetLabelOf(successor));
1589 }
1590}
1591
1592void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1593 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1594 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001595 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001596 locations->SetInAt(0, Location::RequiresRegister());
1597 }
1598}
1599
1600void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1601 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001602 HCondition* condition = cond->AsCondition();
1603 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1604 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1605
Serban Constantinescu02164b32014-11-13 14:05:07 +00001606 if (cond->IsIntConstant()) {
1607 int32_t cond_value = cond->AsIntConstant()->GetValue();
1608 if (cond_value == 1) {
1609 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1610 __ B(true_target);
1611 }
1612 return;
1613 } else {
1614 DCHECK_EQ(cond_value, 0);
1615 }
1616 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001617 // The condition instruction has been materialized, compare the output to 0.
1618 Location cond_val = if_instr->GetLocations()->InAt(0);
1619 DCHECK(cond_val.IsRegister());
1620 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001621 } else {
1622 // The condition instruction has not been materialized, use its inputs as
1623 // the comparison and its condition as the branch condition.
1624 Register lhs = InputRegisterAt(condition, 0);
1625 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001626 Condition arm64_cond = ARM64Condition(condition->GetCondition());
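      // Equality comparisons against zero are emitted as the fused
      // compare-and-branch instructions Cbz/Cbnz instead of Cmp + B.cond.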
1627 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1628 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001629 __ Cbz(lhs, true_target);
1630 } else {
1631 __ Cbnz(lhs, true_target);
1632 }
1633 } else {
1634 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001635 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001636 }
1637 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001638 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1639 __ B(false_target);
1640 }
1641}
1642
1643void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001644 LocationSummary* locations =
1645 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001646 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001647 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001648}
1649
1650void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001651 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001652
1653 if (instruction->IsVolatile()) {
1654 if (kUseAcquireRelease) {
Calin Juravle77520bc2015-01-12 18:45:46 +00001655 // NB: LoadAcquire will record the pc info if needed.
1656 codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001657 } else {
1658 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
Calin Juravle77520bc2015-01-12 18:45:46 +00001659 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001660 // For IRIW sequential consistency kLoadAny is not sufficient.
1661 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1662 }
1663 } else {
1664 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
Calin Juravle77520bc2015-01-12 18:45:46 +00001665 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001666 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001667}
1668
1669void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001670 LocationSummary* locations =
1671 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001672 locations->SetInAt(0, Location::RequiresRegister());
1673 locations->SetInAt(1, Location::RequiresRegister());
1674}
1675
1676void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001677 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001678 CPURegister value = InputCPURegisterAt(instruction, 1);
1679 Offset offset = instruction->GetFieldOffset();
1680 Primitive::Type field_type = instruction->GetFieldType();
1681
1682 if (instruction->IsVolatile()) {
1683 if (kUseAcquireRelease) {
1684 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001685 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001686 } else {
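      // Without store-release, bracket the volatile store with an any-store
      // barrier before it and a full any-any barrier after it.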
1687 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1688 codegen_->Store(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001689 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001690 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1691 }
1692 } else {
1693 codegen_->Store(field_type, value, HeapOperand(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00001694 codegen_->MaybeRecordImplicitNullCheck(instruction);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001695 }
1696
1697 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001698 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001699 }
1700}
1701
Alexandre Rames67555f72014-11-18 10:55:16 +00001702void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1703 LocationSummary::CallKind call_kind =
1704 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1705 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1706 locations->SetInAt(0, Location::RequiresRegister());
1707 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001708 // The output overlaps the inputs: it is written before the inputs are last used, so it must not share their registers.
1709 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00001710}
1711
1712void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1713 LocationSummary* locations = instruction->GetLocations();
1714 Register obj = InputRegisterAt(instruction, 0);
1715 Register cls = InputRegisterAt(instruction, 1);
1716 Register out = OutputRegister(instruction);
1717
1718 vixl::Label done;
1719
1720 // Return 0 if `obj` is null.
1721 // TODO: Avoid this check if we know `obj` is not null.
1722 __ Mov(out, 0);
1723 __ Cbz(obj, &done);
1724
1725 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001726 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001727 __ Cmp(out, cls);
1728 if (instruction->IsClassFinal()) {
1729 // Classes must be equal for the instanceof to succeed.
1730 __ Cset(out, eq);
1731 } else {
1732 // If the classes are not equal, we go into a slow path.
1733 DCHECK(locations->OnlyCallsOnSlowPath());
1734 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001735 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1736 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001737 codegen_->AddSlowPath(slow_path);
1738 __ B(ne, slow_path->GetEntryLabel());
1739 __ Mov(out, 1);
1740 __ Bind(slow_path->GetExitLabel());
1741 }
1742
1743 __ Bind(&done);
1744}
1745
Alexandre Rames5319def2014-10-23 10:03:10 +01001746void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1747 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1748 locations->SetOut(Location::ConstantLocation(constant));
1749}
1750
1751void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1752 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001753 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001754}
1755
Alexandre Rames5319def2014-10-23 10:03:10 +01001756void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1757 LocationSummary* locations =
1758 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1759 locations->AddTemp(LocationFrom(x0));
1760
1761 InvokeDexCallingConventionVisitor calling_convention_visitor;
1762 for (size_t i = 0; i < invoke->InputCount(); i++) {
1763 HInstruction* input = invoke->InputAt(i);
1764 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1765 }
1766
1767 Primitive::Type return_type = invoke->GetType();
1768 if (return_type != Primitive::kPrimVoid) {
1769 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1770 }
1771}
1772
Alexandre Rames67555f72014-11-18 10:55:16 +00001773void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1774 HandleInvoke(invoke);
1775}
1776
1777void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1778 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1779 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1780 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1781 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1782 Location receiver = invoke->GetLocations()->InAt(0);
1783 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001784 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001785
1786 // The register ip1 is required to be used for the hidden argument in
1787 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1788 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1789 scratch_scope.Exclude(ip1);
1790 __ Mov(ip1, invoke->GetDexMethodIndex());
1791
1792 // temp = object->GetClass();
1793 if (receiver.IsStackSlot()) {
1794 __ Ldr(temp, StackOperandFrom(receiver));
1795 __ Ldr(temp, HeapOperand(temp, class_offset));
1796 } else {
1797 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1798 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001799 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00001800 // temp = temp->GetImtEntryAt(method_offset);
1801 __ Ldr(temp, HeapOperand(temp, method_offset));
1802 // lr = temp->GetEntryPoint();
1803 __ Ldr(lr, HeapOperand(temp, entry_point));
1804 // lr();
1805 __ Blr(lr);
1806 DCHECK(!codegen_->IsLeafMethod());
1807 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1808}
1809
1810void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001811 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1812 if (intrinsic.TryDispatch(invoke)) {
1813 return;
1814 }
1815
Alexandre Rames67555f72014-11-18 10:55:16 +00001816 HandleInvoke(invoke);
1817}
1818
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001819void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001820 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
1821 if (intrinsic.TryDispatch(invoke)) {
1822 return;
1823 }
1824
Alexandre Rames67555f72014-11-18 10:55:16 +00001825 HandleInvoke(invoke);
1826}
1827
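// Returns true if an intrinsic expansion was emitted for the invoke, letting
// the caller skip the generic call sequence.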
Andreas Gampe878d58c2015-01-15 23:24:00 -08001828static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
1829 if (invoke->GetLocations()->Intrinsified()) {
1830 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
1831 intrinsic.Dispatch(invoke);
1832 return true;
1833 }
1834 return false;
1835}
1836
1837void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
1838 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
1839 DCHECK(temp.Is(kArtMethodRegister));
Alexandre Rames5319def2014-10-23 10:03:10 +01001840 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08001841 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01001842
1843 // TODO: Implement all kinds of calls:
1844 // 1) boot -> boot
1845 // 2) app -> boot
1846 // 3) app -> app
1847 //
1848 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1849
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001850 if (!invoke->IsRecursive()) {
1851 // temp = method;
1852 LoadCurrentMethod(temp);
1853 // temp = temp->dex_cache_resolved_methods_;
1854 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1855 // temp = temp[index_in_cache];
1856 __ Ldr(temp, HeapOperand(temp, index_in_cache));
1857 // lr = temp->entry_point_from_quick_compiled_code_;
1858 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1859 kArm64WordSize)));
1860 // lr();
1861 __ Blr(lr);
1862 } else {
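    // A recursive call to the method being compiled can branch directly to its
    // own frame entry.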
1863 __ Bl(&frame_entry_label_);
1864 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001865
Andreas Gampe878d58c2015-01-15 23:24:00 -08001866 RecordPcInfo(invoke, invoke->GetDexPc());
1867 DCHECK(!IsLeafMethod());
1868}
1869
1870void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1871 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1872 return;
1873 }
1874
1875 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1876 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Alexandre Rames5319def2014-10-23 10:03:10 +01001877}
1878
1879void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001880 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1881 return;
1882 }
1883
Alexandre Rames5319def2014-10-23 10:03:10 +01001884 LocationSummary* locations = invoke->GetLocations();
1885 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001886 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01001887 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
1888 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1889 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001890 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01001891
1892 // temp = object->GetClass();
1893 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001894 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
1895 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001896 } else {
1897 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00001898 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001899 }
Calin Juravle77520bc2015-01-12 18:45:46 +00001900 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01001901 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001902 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01001903 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001904 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001905 // lr();
1906 __ Blr(lr);
1907 DCHECK(!codegen_->IsLeafMethod());
1908 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1909}
1910
Alexandre Rames67555f72014-11-18 10:55:16 +00001911void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
1912 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
1913 : LocationSummary::kNoCall;
1914 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
1915 locations->SetOut(Location::RequiresRegister());
1916}
1917
1918void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
1919 Register out = OutputRegister(cls);
1920 if (cls->IsReferrersClass()) {
1921 DCHECK(!cls->CanCallRuntime());
1922 DCHECK(!cls->MustGenerateClinitCheck());
1923 codegen_->LoadCurrentMethod(out);
1924 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1925 } else {
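    // Load the type from the current method's dex cache; a null entry means
    // the class is unresolved and the slow path must resolve (and, if
    // requested, initialize) it.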
1926 DCHECK(cls->CanCallRuntime());
1927 codegen_->LoadCurrentMethod(out);
1928 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001929 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00001930
1931 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1932 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
1933 codegen_->AddSlowPath(slow_path);
1934 __ Cbz(out, slow_path->GetEntryLabel());
1935 if (cls->MustGenerateClinitCheck()) {
1936 GenerateClassInitializationCheck(slow_path, out);
1937 } else {
1938 __ Bind(slow_path->GetExitLabel());
1939 }
1940 }
1941}
1942
1943void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
1944 LocationSummary* locations =
1945 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
1946 locations->SetOut(Location::RequiresRegister());
1947}
1948
1949void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
1950 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
1951 __ Ldr(OutputRegister(instruction), exception);
1952 __ Str(wzr, exception);
1953}
1954
Alexandre Rames5319def2014-10-23 10:03:10 +01001955void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
1956 load->SetLocations(nullptr);
1957}
1958
1959void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
1960 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001961 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01001962}
1963
Alexandre Rames67555f72014-11-18 10:55:16 +00001964void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
1965 LocationSummary* locations =
1966 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
1967 locations->SetOut(Location::RequiresRegister());
1968}
1969
1970void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
1971 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
1972 codegen_->AddSlowPath(slow_path);
1973
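  // Load the string through the declaring class's dex cache; a null entry
  // sends us to the slow path to resolve it.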
1974 Register out = OutputRegister(load);
1975 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08001976 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1977 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001978 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00001979 __ Cbz(out, slow_path->GetEntryLabel());
1980 __ Bind(slow_path->GetExitLabel());
1981}
1982
Alexandre Rames5319def2014-10-23 10:03:10 +01001983void LocationsBuilderARM64::VisitLocal(HLocal* local) {
1984 local->SetLocations(nullptr);
1985}
1986
1987void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
1988 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1989}
1990
1991void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
1992 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1993 locations->SetOut(Location::ConstantLocation(constant));
1994}
1995
1996void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
1997 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001998 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001999}
2000
Alexandre Rames67555f72014-11-18 10:55:16 +00002001void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2002 LocationSummary* locations =
2003 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2004 InvokeRuntimeCallingConvention calling_convention;
2005 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2006}
2007
2008void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2009 codegen_->InvokeRuntime(instruction->IsEnter()
2010 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2011 instruction,
2012 instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002013 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002014}
2015
Alexandre Rames42d641b2014-10-27 14:00:51 +00002016void LocationsBuilderARM64::VisitMul(HMul* mul) {
2017 LocationSummary* locations =
2018 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2019 switch (mul->GetResultType()) {
2020 case Primitive::kPrimInt:
2021 case Primitive::kPrimLong:
2022 locations->SetInAt(0, Location::RequiresRegister());
2023 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002024 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002025 break;
2026
2027 case Primitive::kPrimFloat:
2028 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002029 locations->SetInAt(0, Location::RequiresFpuRegister());
2030 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002031 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002032 break;
2033
2034 default:
2035 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2036 }
2037}
2038
2039void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2040 switch (mul->GetResultType()) {
2041 case Primitive::kPrimInt:
2042 case Primitive::kPrimLong:
2043 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2044 break;
2045
2046 case Primitive::kPrimFloat:
2047 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002048 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002049 break;
2050
2051 default:
2052 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2053 }
2054}
2055
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002056void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2057 LocationSummary* locations =
2058 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2059 switch (neg->GetResultType()) {
2060 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002061 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002062 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002063 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002064 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002065
2066 case Primitive::kPrimFloat:
2067 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002068 locations->SetInAt(0, Location::RequiresFpuRegister());
2069 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002070 break;
2071
2072 default:
2073 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2074 }
2075}
2076
2077void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2078 switch (neg->GetResultType()) {
2079 case Primitive::kPrimInt:
2080 case Primitive::kPrimLong:
2081 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2082 break;
2083
2084 case Primitive::kPrimFloat:
2085 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002086 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002087 break;
2088
2089 default:
2090 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2091 }
2092}
2093
2094void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2095 LocationSummary* locations =
2096 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2097 InvokeRuntimeCallingConvention calling_convention;
2098 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002099 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002100 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002101 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2102 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2103 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002104}
2105
2106void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2107 LocationSummary* locations = instruction->GetLocations();
2108 InvokeRuntimeCallingConvention calling_convention;
2109 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2110 DCHECK(type_index.Is(w0));
2111 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002112 DCHECK(current_method.Is(w2));
Alexandre Rames67555f72014-11-18 10:55:16 +00002113 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002114 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002115 codegen_->InvokeRuntime(
2116 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002117 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2118 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002119}
2120
Alexandre Rames5319def2014-10-23 10:03:10 +01002121void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2122 LocationSummary* locations =
2123 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2124 InvokeRuntimeCallingConvention calling_convention;
2125 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2126 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2127 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002128 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002129}
2130
2131void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2132 LocationSummary* locations = instruction->GetLocations();
2133 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2134 DCHECK(type_index.Is(w0));
2135 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2136 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002137 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002138 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002139 codegen_->InvokeRuntime(
2140 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002141 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002142}
2143
2144void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2145 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002146 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002147 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002148}
2149
2150void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2151 switch (instruction->InputAt(0)->GetType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002152 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002153 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002154 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002155 break;
2156
2157 default:
2158 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2159 }
2160}
2161
2162void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2163 LocationSummary* locations =
2164 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2165 locations->SetInAt(0, Location::RequiresRegister());
2166 if (instruction->HasUses()) {
2167 locations->SetOut(Location::SameAsFirstInput());
2168 }
2169}
2170
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002171void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00002172 if (codegen_->CanMoveNullCheckToUser(instruction)) {
2173 return;
2174 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002175 Location obj = instruction->GetLocations()->InAt(0);
2176
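  // Load from the object into wzr: if the object is null this faults, and the
  // recorded pc lets the fault handler attribute the exception to this check.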
2177 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
2178 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2179}
2180
2181void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002182 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2183 codegen_->AddSlowPath(slow_path);
2184
2185 LocationSummary* locations = instruction->GetLocations();
2186 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00002187
2188 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01002189}
2190
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002191void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2192 if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2193 GenerateImplicitNullCheck(instruction);
2194 } else {
2195 GenerateExplicitNullCheck(instruction);
2196 }
2197}
2198
Alexandre Rames67555f72014-11-18 10:55:16 +00002199void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2200 HandleBinaryOp(instruction);
2201}
2202
2203void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2204 HandleBinaryOp(instruction);
2205}
2206
Alexandre Rames3e69f162014-12-10 10:36:50 +00002207void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2208 LOG(FATAL) << "Unreachable";
2209}
2210
2211void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2212 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2213}
2214
Alexandre Rames5319def2014-10-23 10:03:10 +01002215void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2216 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2217 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2218 if (location.IsStackSlot()) {
2219 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2220 } else if (location.IsDoubleStackSlot()) {
2221 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2222 }
2223 locations->SetOut(location);
2224}
2225
2226void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2227 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002228 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002229}
2230
2231void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2232 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2233 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2234 locations->SetInAt(i, Location::Any());
2235 }
2236 locations->SetOut(Location::Any());
2237}
2238
2239void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002240 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002241 LOG(FATAL) << "Unreachable";
2242}
2243
Serban Constantinescu02164b32014-11-13 14:05:07 +00002244void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002245 Primitive::Type type = rem->GetResultType();
2246 LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
2247 : LocationSummary::kNoCall;
2248 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2249
2250 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002251 case Primitive::kPrimInt:
2252 case Primitive::kPrimLong:
2253 locations->SetInAt(0, Location::RequiresRegister());
2254 locations->SetInAt(1, Location::RequiresRegister());
2255 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2256 break;
2257
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002258 case Primitive::kPrimFloat:
2259 case Primitive::kPrimDouble: {
2260 InvokeRuntimeCallingConvention calling_convention;
2261 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2262 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2263 locations->SetOut(calling_convention.GetReturnLocation(type));
2264
2265 break;
2266 }
2267
Serban Constantinescu02164b32014-11-13 14:05:07 +00002268 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002269 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00002270 }
2271}
2272
2273void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2274 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002275
Serban Constantinescu02164b32014-11-13 14:05:07 +00002276 switch (type) {
2277 case Primitive::kPrimInt:
2278 case Primitive::kPrimLong: {
2279 UseScratchRegisterScope temps(GetVIXLAssembler());
2280 Register dividend = InputRegisterAt(rem, 0);
2281 Register divisor = InputRegisterAt(rem, 1);
2282 Register output = OutputRegister(rem);
2283 Register temp = temps.AcquireSameSizeAs(output);
2284
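      // output = dividend - (dividend / divisor) * divisor.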
2285 __ Sdiv(temp, dividend, divisor);
2286 __ Msub(output, temp, divisor, dividend);
2287 break;
2288 }
2289
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002290 case Primitive::kPrimFloat:
2291 case Primitive::kPrimDouble: {
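      // Floating-point remainders are computed by the fmodf/fmod runtime
      // entry points.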
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
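  // Nothing to emit here: the locations builder above constrains the stored
  // value to the local's stack slot, so the actual store is performed by the
  // moves inserted to satisfy that location.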
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

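  // A volatile load must not be reordered with subsequent memory accesses.
  // With acquire/release enabled this is expressed directly as a load-acquire;
  // otherwise a plain load is followed by a full kAnyAny barrier (see the
  // comment below on why kLoadAny is not enough).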
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

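  // A volatile store uses a store-release when acquire/release is enabled;
  // otherwise it is bracketed by a kAnyStore barrier before the store and a
  // full kAnyAny barrier after it, as required for Java volatile writes.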
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

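  // Storing a reference into a static field creates a pointer the garbage
  // collector has to track, so the card covering the holder class object is
  // dirtied. Primitive-typed stores never need this write barrier.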
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

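// Suspend checks are safepoints at which the runtime may suspend this thread
// (e.g. for GC or debugging). They are only emitted explicitly when neither a
// loop back edge nor the entry-block goto already covers them.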
void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
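  // The exception object is passed in the first runtime argument register; the
  // deliver-exception entry point unwinds and does not return to this code.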
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

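  // Integral-to-integral conversions are bitfield extracts: conversions to
  // char, and widenings from char, zero-extend (Ubfx) because char is the only
  // unsigned primitive type; everything else sign-extends (Sbfx). Integer to
  // FP uses Scvtf, FP to integer uses Fcvtzs (round towards zero, as the Java
  // language requires), and float/double conversions use Fcvt.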
  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art