/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
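// As a rough worked example (an estimate derived from the figures above, not a measurement):
// at the threshold of 7 entries the compare/jump sequence costs about 1.5 * 7 + 3 = ~13.5
// instructions, while the jump table costs 7 instructions + 7 literal words = 14 words, so the
// table only starts to pay off for larger switches.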
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// Reference load (except object array loads) is using LDR Wt, [Xn, #offset] which can handle
// offset < 16KiB. For offsets >= 16KiB, the load shall be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
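    // (For context: a load/store pair immediate is a signed 7-bit value scaled by the access
    // size, so for 64-bit registers only offsets in [-512, 504] fit in a single STP/LDP; this is
    // the constraint the IsImmLSPair() check above guards against.)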
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), /* is_save= */ true);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), /* is_save= */ false);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`DataType::Type::kReference`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
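  // For example (just tracing the code above), an (int, float, long) signature hands out the
  // first core register, the first FP register and the second core register, while stack_index_
  // still advances by 1 + 1 + 2 = 4 slots, because a stack slot is reserved for every argument
  // even when it is passed in a register.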
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100864 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100865 return next_location;
866}
867
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100868Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +0100869 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +0100870}
871
Serban Constantinescu579885a2015-02-22 20:51:33 +0000872CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100873 const CompilerOptions& compiler_options,
874 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +0100875 : CodeGenerator(graph,
876 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000877 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000878 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +0100879 callee_saved_core_registers.GetList(),
880 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100881 compiler_options,
882 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100883 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
884 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +0100885 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000886 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100887 move_resolver_(graph->GetAllocator(), this),
Artem Serovaa6f4832018-11-21 18:57:54 +0000888 assembler_(graph->GetAllocator(),
889 compiler_options.GetInstructionSetFeatures()->AsArm64InstructionSetFeatures()),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000890 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100891 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000892 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100893 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +0000894 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100895 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko2d06e022019-07-08 15:45:19 +0100896 boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markof6675082019-05-17 12:05:28 +0100897 call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100898 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markof6675082019-05-17 12:05:28 +0100899 uint32_literals_(std::less<uint32_t>(),
900 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
901 uint64_literals_(std::less<uint64_t>(),
902 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +0000903 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +0100904 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +0000905 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Marko966b46f2018-08-03 10:20:19 +0000906 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
907 jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
908 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000909 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +0000910 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000911}
Alexandre Rames5319def2014-10-23 10:03:10 +0100912
Alexandre Rames67555f72014-11-18 10:55:16 +0000913#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100914
Zheng Xu3927c8b2015-11-18 17:46:25 +0800915void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +0100916 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800917 jump_table->EmitTable(this);
918 }
919}
920
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000921void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800922 EmitJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +0000923
924  // Emit JIT Baker read barrier slow paths.
925 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
926 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
927 uint32_t encoded_data = entry.first;
928 vixl::aarch64::Label* slow_path_entry = &entry.second.label;
929 __ Bind(slow_path_entry);
Andreas Gampe3db70682018-12-26 15:12:03 -0800930 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);
Vladimir Marko966b46f2018-08-03 10:20:19 +0000931 }
932
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000933 // Ensure we emit the literal pool.
934 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +0000935
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000936 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000937
938 // Verify Baker read barrier linker patches.
939 if (kIsDebugBuild) {
940 ArrayRef<const uint8_t> code = allocator->GetMemory();
941 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
942 DCHECK(info.label.IsBound());
943 uint32_t literal_offset = info.label.GetLocation();
944 DCHECK_ALIGNED(literal_offset, 4u);
945
946 auto GetInsn = [&code](uint32_t offset) {
947 DCHECK_ALIGNED(offset, 4u);
948 return
949 (static_cast<uint32_t>(code[offset + 0]) << 0) +
950 (static_cast<uint32_t>(code[offset + 1]) << 8) +
951             (static_cast<uint32_t>(code[offset + 2]) << 16) +
952 (static_cast<uint32_t>(code[offset + 3]) << 24);
953 };
954
955 const uint32_t encoded_data = info.custom_data;
956 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
957 // Check that the next instruction matches the expected LDR.
958 switch (kind) {
Vladimir Marko0ecac682018-08-07 10:40:38 +0100959 case BakerReadBarrierKind::kField:
960 case BakerReadBarrierKind::kAcquire: {
Vladimir Markoca1e0382018-04-11 09:58:41 +0000961 DCHECK_GE(code.size() - literal_offset, 8u);
962 uint32_t next_insn = GetInsn(literal_offset + 4u);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000963 CheckValidReg(next_insn & 0x1fu); // Check destination register.
964 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko0ecac682018-08-07 10:40:38 +0100965 if (kind == BakerReadBarrierKind::kField) {
966 // LDR (immediate) with correct base_reg.
967 CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
968 } else {
969 DCHECK(kind == BakerReadBarrierKind::kAcquire);
970 // LDAR with correct base_reg.
971 CHECK_EQ(next_insn & 0xffffffe0u, 0x88dffc00u | (base_reg << 5));
972 }
Vladimir Markoca1e0382018-04-11 09:58:41 +0000973 break;
974 }
975 case BakerReadBarrierKind::kArray: {
976 DCHECK_GE(code.size() - literal_offset, 8u);
977 uint32_t next_insn = GetInsn(literal_offset + 4u);
978 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
979 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
980 CheckValidReg(next_insn & 0x1fu); // Check destination register.
981 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
982 CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
983        CheckValidReg((next_insn >> 16) & 0x1fu);  // Check index register.
984 break;
985 }
986 case BakerReadBarrierKind::kGcRoot: {
987 DCHECK_GE(literal_offset, 4u);
988 uint32_t prev_insn = GetInsn(literal_offset - 4u);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000989 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko94796f82018-08-08 15:15:33 +0100990 // Usually LDR (immediate) with correct root_reg but
991 // we may have a "MOV marked, old_value" for UnsafeCASObject.
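        // Roughly: either `ldr w<root_reg>, [xN, #imm]` or, for the CAS intrinsic,
        // `mov w<root_reg>, w<old_value>`; the two masks below match exactly these shapes.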
992 if ((prev_insn & 0xffe0ffff) != (0x2a0003e0 | root_reg)) { // MOV?
993 CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg); // LDR?
994 }
Vladimir Markoca1e0382018-04-11 09:58:41 +0000995 break;
996 }
997 default:
998 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
999 UNREACHABLE();
1000 }
1001 }
1002 }
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001003}
1004
Zheng Xuad4450e2015-04-17 18:48:56 +08001005void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1006 // Note: There are 6 kinds of moves:
1007 // 1. constant -> GPR/FPR (non-cycle)
1008 // 2. constant -> stack (non-cycle)
1009 // 3. GPR/FPR -> GPR/FPR
1010 // 4. GPR/FPR -> stack
1011 // 5. stack -> GPR/FPR
1012 // 6. stack -> stack (non-cycle)
1013  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
1014  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
1015  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
1016  // resolve the dependency.
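  // As a sketch (not the exact code VIXL emits), a case 4/5 swap between w0 and a
  // stack slot can be resolved with that single core temp, roughly:
  //   ldr w16, [sp, #offset]   // scratch <- stack slot
  //   str w0,  [sp, #offset]   // stack slot <- w0
  //   mov w0,  w16             // w0 <- scratch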
1017 vixl_temps_.Open(GetVIXLAssembler());
1018}
1019
1020void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1021 vixl_temps_.Close();
1022}
1023
1024Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
Artem Serovd4bccf12017-04-03 18:47:32 +01001025 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
1026 || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
1027 || kind == Location::kSIMDStackSlot);
1028 kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
1029 ? Location::kFpuRegister
1030 : Location::kRegister;
Zheng Xuad4450e2015-04-17 18:48:56 +08001031 Location scratch = GetScratchLocation(kind);
1032 if (!scratch.Equals(Location::NoLocation())) {
1033 return scratch;
1034 }
1035 // Allocate from VIXL temp registers.
1036 if (kind == Location::kRegister) {
1037 scratch = LocationFrom(vixl_temps_.AcquireX());
1038 } else {
Roland Levillain952b2352017-05-03 19:49:14 +01001039 DCHECK_EQ(kind, Location::kFpuRegister);
Artem Serovd4bccf12017-04-03 18:47:32 +01001040 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
1041 ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
1042 : vixl_temps_.AcquireD());
Zheng Xuad4450e2015-04-17 18:48:56 +08001043 }
1044 AddScratchLocation(scratch);
1045 return scratch;
1046}
1047
1048void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1049 if (loc.IsRegister()) {
1050 vixl_temps_.Release(XRegisterFrom(loc));
1051 } else {
1052 DCHECK(loc.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001053 vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
Zheng Xuad4450e2015-04-17 18:48:56 +08001054 }
1055 RemoveScratchLocation(loc);
1056}
1057
Alexandre Rames3e69f162014-12-10 10:36:50 +00001058void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001059 MoveOperands* move = moves_[index];
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001060 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001061}
1062
Alexandre Rames5319def2014-10-23 10:03:10 +01001063void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001064 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001065 __ Bind(&frame_entry_label_);
1066
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001067 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1068 UseScratchRegisterScope temps(masm);
1069 Register temp = temps.AcquireX();
1070 __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1071 __ Add(temp, temp, 1);
1072 __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1073 }
1074
Vladimir Marko33bff252017-11-01 14:35:42 +00001075 bool do_overflow_check =
1076 FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001077 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001078 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001079 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001080 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Vladimir Marko33bff252017-11-01 14:35:42 +00001081 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001082 {
1083 // Ensure that between load and RecordPcInfo there are no pools emitted.
1084 ExactAssemblyScope eas(GetVIXLAssembler(),
1085 kInstructionSize,
1086 CodeBufferCheckScope::kExactSize);
1087 __ ldr(wzr, MemOperand(temp, 0));
1088 RecordPcInfo(nullptr, 0);
1089 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001090 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001091
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001092 if (!HasEmptyFrame()) {
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001093 // Stack layout:
1094 // sp[frame_size - 8] : lr.
1095 // ... : other preserved core registers.
1096 // ... : other preserved fp registers.
1097 // ... : reserved frame space.
1098 // sp[0] : current method.
Vladimir Marko1a225a72019-07-05 13:37:42 +01001099 int32_t frame_size = dchecked_integral_cast<int32_t>(GetFrameSize());
1100 uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
1101 CPURegList preserved_core_registers = GetFramePreservedCoreRegisters();
1102 DCHECK(!preserved_core_registers.IsEmpty());
1103 uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
1104 CPURegList preserved_fp_registers = GetFramePreservedFPRegisters();
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001105
Vladimir Marko1a225a72019-07-05 13:37:42 +01001106 // Save the current method if we need it, or if using STP reduces code
1107 // size. Note that we do not do this in HCurrentMethod, as the
1108 // instruction might have been removed in the SSA graph.
1109 CPURegister lowest_spill;
1110 if (core_spills_offset == kXRegSizeInBytes) {
1111 // If there is no gap between the method and the lowest core spill, use
1112 // aligned STP pre-index to store both. Max difference is 512. We do
1113 // that to reduce code size even if we do not have to save the method.
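      // E.g. with a 96-byte frame and x20 as the lowest spill, this emits roughly
      // `stp x0, x20, [sp, #-96]!` (assuming kArtMethodRegister is x0 here).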
1114 DCHECK_LE(frame_size, 512); // 32 core registers are only 256 bytes.
1115 lowest_spill = preserved_core_registers.PopLowestIndex();
1116 __ Stp(kArtMethodRegister, lowest_spill, MemOperand(sp, -frame_size, PreIndex));
1117 } else if (RequiresCurrentMethod()) {
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001118 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001119 } else {
1120 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001121 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001122 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Vladimir Marko1a225a72019-07-05 13:37:42 +01001123 if (lowest_spill.IsValid()) {
1124 GetAssembler()->cfi().RelOffset(DWARFReg(lowest_spill), core_spills_offset);
1125 core_spills_offset += kXRegSizeInBytes;
1126 }
1127 GetAssembler()->SpillRegisters(preserved_core_registers, core_spills_offset);
1128 GetAssembler()->SpillRegisters(preserved_fp_registers, fp_spills_offset);
Mingyao Yang063fc772016-08-02 11:02:54 -07001129
1130 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1131 // Initialize should_deoptimize flag to 0.
1132 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1133 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1134 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001135 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01001136
Andreas Gampe3db70682018-12-26 15:12:03 -08001137 MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01001138}
1139
1140void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001141 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001142 if (!HasEmptyFrame()) {
Vladimir Marko1a225a72019-07-05 13:37:42 +01001143 int32_t frame_size = dchecked_integral_cast<int32_t>(GetFrameSize());
1144 uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
1145 CPURegList preserved_core_registers = GetFramePreservedCoreRegisters();
1146 DCHECK(!preserved_core_registers.IsEmpty());
1147 uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
1148 CPURegList preserved_fp_registers = GetFramePreservedFPRegisters();
1149
1150 CPURegister lowest_spill;
1151 if (core_spills_offset == kXRegSizeInBytes) {
1152 // If there is no gap between the method and the lowest core spill, use
1153      // aligned LDP post-index to pop both. Max difference is 504. We do
1154 // that to reduce code size even though the loaded method is unused.
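      // E.g. the matching pop below is roughly `ldp xzr, x20, [sp], #frame_size`,
      // with the stale method slot discarded into xzr.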
1155 DCHECK_LE(frame_size, 504); // 32 core registers are only 256 bytes.
1156 lowest_spill = preserved_core_registers.PopLowestIndex();
1157 core_spills_offset += kXRegSizeInBytes;
1158 }
1159 GetAssembler()->UnspillRegisters(preserved_fp_registers, fp_spills_offset);
1160 GetAssembler()->UnspillRegisters(preserved_core_registers, core_spills_offset);
1161 if (lowest_spill.IsValid()) {
1162 __ Ldp(xzr, lowest_spill, MemOperand(sp, frame_size, PostIndex));
1163 GetAssembler()->cfi().Restore(DWARFReg(lowest_spill));
1164 } else {
1165 __ Drop(frame_size);
1166 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001167 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001168 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001169 __ Ret();
1170 GetAssembler()->cfi().RestoreState();
1171 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001172}
1173
Scott Wakeling97c72b72016-06-24 16:19:36 +01001174CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001175 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001176 return CPURegList(CPURegister::kRegister, kXRegSize,
1177 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001178}
1179
Scott Wakeling97c72b72016-06-24 16:19:36 +01001180CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001181 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1182 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001183 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1184 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001185}
1186
Alexandre Rames5319def2014-10-23 10:03:10 +01001187void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1188 __ Bind(GetLabelOf(block));
1189}
1190
Calin Juravle175dc732015-08-25 15:42:32 +01001191void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1192 DCHECK(location.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001193 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
Calin Juravle175dc732015-08-25 15:42:32 +01001194}
1195
Calin Juravlee460d1d2015-09-29 04:52:17 +01001196void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1197 if (location.IsRegister()) {
1198 locations->AddTemp(location);
1199 } else {
1200 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1201 }
1202}
1203
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001204void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001205 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001206 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001207 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001208 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001209 if (value_can_be_null) {
1210 __ Cbz(value, &done);
1211 }
Roland Levillainc73f0522018-08-14 15:16:50 +01001212 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07001213 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Roland Levillainc73f0522018-08-14 15:16:50 +01001214 // Calculate the offset (in the card table) of the card corresponding to
1215 // `object`.
Alexandre Rames5319def2014-10-23 10:03:10 +01001216 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Roland Levillainc73f0522018-08-14 15:16:50 +01001217 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
1218 // `object`'s card.
1219 //
1220 // Register `card` contains the address of the card table. Note that the card
1221 // table's base is biased during its creation so that it always starts at an
1222 // address whose least-significant byte is equal to `kCardDirty` (see
1223 // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
1224 // below writes the `kCardDirty` (byte) value into the `object`'s card
1225 // (located at `card + object >> kCardShift`).
1226 //
1227 // This dual use of the value in register `card` (1. to calculate the location
1228 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
1229 // (no need to explicitly load `kCardDirty` as an immediate value).
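  // In effect, for an object at address `obj`, the store below is roughly
  //   *(biased_card_table_base + (obj >> kCardShift)) = low_byte(biased_card_table_base)
  // and that low byte equals kCardDirty by construction.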
Serban Constantinescu02164b32014-11-13 14:05:07 +00001230 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001231 if (value_can_be_null) {
1232 __ Bind(&done);
1233 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001234}
1235
David Brazdil58282f42016-01-14 12:45:10 +00001236void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001237 // Blocked core registers:
1238 // lr : Runtime reserved.
1239 // tr : Runtime reserved.
Roland Levillain97c46462017-05-11 14:04:03 +01001240 // mr : Runtime reserved.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001241 // ip1 : VIXL core temp.
1242 // ip0 : VIXL core temp.
Peter Collingbournebd8e10c2018-04-12 16:39:55 -07001243 // x18 : Platform register.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001244 //
1245 // Blocked fp registers:
1246 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001247 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1248 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001249 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001250 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001251 }
Peter Collingbournebd8e10c2018-04-12 16:39:55 -07001252 blocked_core_registers_[X18] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001253
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001254 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001255 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001256 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001257 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001258
David Brazdil58282f42016-01-14 12:45:10 +00001259 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001260 // Stubs do not save callee-save floating point registers. If the graph
1261 // is debuggable, we need to deal with these registers differently. For
1262 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001263 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1264 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001265 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001266 }
1267 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001268}
1269
Alexandre Rames3e69f162014-12-10 10:36:50 +00001270size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1271 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1272 __ Str(reg, MemOperand(sp, stack_index));
1273 return kArm64WordSize;
1274}
1275
1276size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1277 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1278 __ Ldr(reg, MemOperand(sp, stack_index));
1279 return kArm64WordSize;
1280}
1281
1282size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1283 FPRegister reg = FPRegister(reg_id, kDRegSize);
1284 __ Str(reg, MemOperand(sp, stack_index));
1285 return kArm64WordSize;
1286}
1287
1288size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1289 FPRegister reg = FPRegister(reg_id, kDRegSize);
1290 __ Ldr(reg, MemOperand(sp, stack_index));
1291 return kArm64WordSize;
1292}
1293
Alexandre Rames5319def2014-10-23 10:03:10 +01001294void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001295 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001296}
1297
1298void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001299 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001300}
1301
Vladimir Markoa0431112018-06-25 09:32:54 +01001302const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
1303 return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
1304}
1305
Alexandre Rames67555f72014-11-18 10:55:16 +00001306void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001307 if (constant->IsIntConstant()) {
1308 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1309 } else if (constant->IsLongConstant()) {
1310 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1311 } else if (constant->IsNullConstant()) {
1312 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001313 } else if (constant->IsFloatConstant()) {
1314 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1315 } else {
1316 DCHECK(constant->IsDoubleConstant());
1317 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1318 }
1319}
1320
Alexandre Rames3e69f162014-12-10 10:36:50 +00001321
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001322static bool CoherentConstantAndType(Location constant, DataType::Type type) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001323 DCHECK(constant.IsConstant());
1324 HConstant* cst = constant.GetConstant();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001325 return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001326 // Null is mapped to a core W register, which we associate with kPrimInt.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001327 (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
1328 (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
1329 (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
1330 (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001331}
1332
Roland Levillain952b2352017-05-03 19:49:14 +01001333// Allocate a scratch register from the VIXL pool, querying first
1334// the floating-point register pool, and then the core register
1335// pool. This is essentially a reimplementation of
Roland Levillain558dea12017-01-27 19:40:44 +00001336// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1337// using a different allocation strategy.
1338static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1339 vixl::aarch64::UseScratchRegisterScope* temps,
1340 int size_in_bits) {
1341 return masm->GetScratchFPRegisterList()->IsEmpty()
1342 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1343 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1344}
1345
Calin Juravlee460d1d2015-09-29 04:52:17 +01001346void CodeGeneratorARM64::MoveLocation(Location destination,
1347 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001348 DataType::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001349 if (source.Equals(destination)) {
1350 return;
1351 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001352
1353 // A valid move can always be inferred from the destination and source
1354 // locations. When moving from and to a register, the argument type can be
1355 // used to generate 32bit instead of 64bit moves. In debug mode we also
1356  // check the coherency of the locations and the type.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001357 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001358
1359 if (destination.IsRegister() || destination.IsFpuRegister()) {
1360 if (unspecified_type) {
1361 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1362 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001363 (src_cst != nullptr && (src_cst->IsIntConstant()
1364 || src_cst->IsFloatConstant()
1365 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001366        // For stack slots and 32bit constants, a 32bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001367 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexandre Rames67555f72014-11-18 10:55:16 +00001368 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001369 // If the source is a double stack slot or a 64bit constant, a 64bit
1370 // type is appropriate. Else the source is a register, and since the
1371        // type has not been specified, we choose a 64bit type to force a 64bit
1372 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001373 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexandre Rames67555f72014-11-18 10:55:16 +00001374 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001375 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001376 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1377 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001378 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001379 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1380 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1381 __ Ldr(dst, StackOperandFrom(source));
Artem Serovd4bccf12017-04-03 18:47:32 +01001382 } else if (source.IsSIMDStackSlot()) {
1383 __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001384 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001385 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001386 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001387 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001388 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001389 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001390 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001391 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001392 DataType::Type source_type = DataType::Is64BitType(dst_type)
1393 ? DataType::Type::kInt64
1394 : DataType::Type::kInt32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001395 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1396 }
1397 } else {
1398 DCHECK(source.IsFpuRegister());
1399 if (destination.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001400 DataType::Type source_type = DataType::Is64BitType(dst_type)
1401 ? DataType::Type::kFloat64
1402 : DataType::Type::kFloat32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001403 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1404 } else {
1405 DCHECK(destination.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001406 if (GetGraph()->HasSIMD()) {
1407 __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
1408 } else {
1409 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
1410 }
1411 }
1412 }
1413 } else if (destination.IsSIMDStackSlot()) {
1414 if (source.IsFpuRegister()) {
1415 __ Str(QRegisterFrom(source), StackOperandFrom(destination));
1416 } else {
1417 DCHECK(source.IsSIMDStackSlot());
1418 UseScratchRegisterScope temps(GetVIXLAssembler());
1419 if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
1420 Register temp = temps.AcquireX();
1421 __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
1422 __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
1423 __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
1424 __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
1425 } else {
1426 FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1427 __ Ldr(temp, StackOperandFrom(source));
1428 __ Str(temp, StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001429 }
1430 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001431 } else { // The destination is not a register. It must be a stack slot.
1432 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1433 if (source.IsRegister() || source.IsFpuRegister()) {
1434 if (unspecified_type) {
1435 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001436 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001437 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001438 dst_type =
1439 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001440 }
1441 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001442 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1443 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001444 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001445 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001446 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1447 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001448 UseScratchRegisterScope temps(GetVIXLAssembler());
1449 HConstant* src_cst = source.GetConstant();
1450 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001451 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001452 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1453 ? Register(xzr)
1454 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001455 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001456 if (src_cst->IsIntConstant()) {
1457 temp = temps.AcquireW();
1458 } else if (src_cst->IsLongConstant()) {
1459 temp = temps.AcquireX();
1460 } else if (src_cst->IsFloatConstant()) {
1461 temp = temps.AcquireS();
1462 } else {
1463 DCHECK(src_cst->IsDoubleConstant());
1464 temp = temps.AcquireD();
1465 }
1466 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001467 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001468 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001469 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001470 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001471 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001472 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001473 // Use any scratch register (a core or a floating-point one)
1474 // from VIXL scratch register pools as a temporary.
1475 //
1476 // We used to only use the FP scratch register pool, but in some
1477 // rare cases the only register from this pool (D31) would
1478 // already be used (e.g. within a ParallelMove instruction, when
1479      // a move is blocked by another move requiring a scratch FP
1480 // register, which would reserve D31). To prevent this issue, we
1481 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001482 //
1483      // Also, we ask for an FP scratch register first, as the
Roland Levillain952b2352017-05-03 19:49:14 +01001484      // demand for scratch core registers is higher. This is why we
Roland Levillain558dea12017-01-27 19:40:44 +00001485 // use AcquireFPOrCoreCPURegisterOfSize instead of
1486 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1487 // allocates core scratch registers first.
1488 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1489 GetVIXLAssembler(),
1490 &temps,
1491 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001492 __ Ldr(temp, StackOperandFrom(source));
1493 __ Str(temp, StackOperandFrom(destination));
1494 }
1495 }
1496}
1497
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001498void CodeGeneratorARM64::Load(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001499 CPURegister dst,
1500 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001501 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001502 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001503 case DataType::Type::kUint8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001504 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001505 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001506 case DataType::Type::kInt8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001507 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001508 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001509 case DataType::Type::kUint16:
Alexandre Rames67555f72014-11-18 10:55:16 +00001510 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001511 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001512 case DataType::Type::kInt16:
1513 __ Ldrsh(Register(dst), src);
1514 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001515 case DataType::Type::kInt32:
1516 case DataType::Type::kReference:
1517 case DataType::Type::kInt64:
1518 case DataType::Type::kFloat32:
1519 case DataType::Type::kFloat64:
1520 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001521 __ Ldr(dst, src);
1522 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001523 case DataType::Type::kUint32:
1524 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001525 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001526 LOG(FATAL) << "Unreachable type " << type;
1527 }
1528}
1529
Calin Juravle77520bc2015-01-12 18:45:46 +00001530void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001531 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001532 const MemOperand& src,
1533 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001534 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001535 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001536 Register temp_base = temps.AcquireX();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001537 DataType::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001538
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001539 DCHECK(!src.IsPreIndex());
1540 DCHECK(!src.IsPostIndex());
1541
1542 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001543 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001544 {
1545 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1546 MemOperand base = MemOperand(temp_base);
1547 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001548 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001549 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001550 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001551 {
1552 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1553 __ ldarb(Register(dst), base);
1554 if (needs_null_check) {
1555 MaybeRecordImplicitNullCheck(instruction);
1556 }
1557 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001558 if (type == DataType::Type::kInt8) {
1559 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
Artem Serov914d7a82017-02-07 14:33:49 +00001560 }
1561 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001562 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001563 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001564 {
1565 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1566 __ ldarh(Register(dst), base);
1567 if (needs_null_check) {
1568 MaybeRecordImplicitNullCheck(instruction);
1569 }
1570 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001571 if (type == DataType::Type::kInt16) {
1572 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
1573 }
Artem Serov914d7a82017-02-07 14:33:49 +00001574 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001575 case DataType::Type::kInt32:
1576 case DataType::Type::kReference:
1577 case DataType::Type::kInt64:
1578 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001579 {
1580 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1581 __ ldar(Register(dst), base);
1582 if (needs_null_check) {
1583 MaybeRecordImplicitNullCheck(instruction);
1584 }
1585 }
1586 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001587 case DataType::Type::kFloat32:
1588 case DataType::Type::kFloat64: {
Artem Serov914d7a82017-02-07 14:33:49 +00001589 DCHECK(dst.IsFPRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001590 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001591
Artem Serov914d7a82017-02-07 14:33:49 +00001592 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1593 {
1594 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1595 __ ldar(temp, base);
1596 if (needs_null_check) {
1597 MaybeRecordImplicitNullCheck(instruction);
1598 }
1599 }
1600 __ Fmov(FPRegister(dst), temp);
1601 break;
Roland Levillain44015862016-01-22 11:47:17 +00001602 }
Aart Bik66c158e2018-01-31 12:55:04 -08001603 case DataType::Type::kUint32:
1604 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001605 case DataType::Type::kVoid:
Artem Serov914d7a82017-02-07 14:33:49 +00001606 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001607 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001608 }
1609}
1610
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001611void CodeGeneratorARM64::Store(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001612 CPURegister src,
1613 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001614 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001615 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001616 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001617 case DataType::Type::kInt8:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001618 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001619 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001620 case DataType::Type::kUint16:
1621 case DataType::Type::kInt16:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001622 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001623 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001624 case DataType::Type::kInt32:
1625 case DataType::Type::kReference:
1626 case DataType::Type::kInt64:
1627 case DataType::Type::kFloat32:
1628 case DataType::Type::kFloat64:
1629 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001630 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001631 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001632 case DataType::Type::kUint32:
1633 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001634 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001635 LOG(FATAL) << "Unreachable type " << type;
1636 }
1637}
1638
Artem Serov914d7a82017-02-07 14:33:49 +00001639void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001640 DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001641 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001642 const MemOperand& dst,
1643 bool needs_null_check) {
1644 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001645 UseScratchRegisterScope temps(GetVIXLAssembler());
1646 Register temp_base = temps.AcquireX();
1647
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001648 DCHECK(!dst.IsPreIndex());
1649 DCHECK(!dst.IsPostIndex());
1650
1651 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001652 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001653 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001654 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001655 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001656 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001657 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001658 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001659 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001660 {
1661 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1662 __ stlrb(Register(src), base);
1663 if (needs_null_check) {
1664 MaybeRecordImplicitNullCheck(instruction);
1665 }
1666 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001667 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001668 case DataType::Type::kUint16:
1669 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001670 {
1671 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1672 __ stlrh(Register(src), base);
1673 if (needs_null_check) {
1674 MaybeRecordImplicitNullCheck(instruction);
1675 }
1676 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001677 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001678 case DataType::Type::kInt32:
1679 case DataType::Type::kReference:
1680 case DataType::Type::kInt64:
1681 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001682 {
1683 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1684 __ stlr(Register(src), base);
1685 if (needs_null_check) {
1686 MaybeRecordImplicitNullCheck(instruction);
1687 }
1688 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001689 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001690 case DataType::Type::kFloat32:
1691 case DataType::Type::kFloat64: {
1692 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001693 Register temp_src;
1694 if (src.IsZero()) {
1695 // The zero register is used to avoid synthesizing zero constants.
1696 temp_src = Register(src);
1697 } else {
1698 DCHECK(src.IsFPRegister());
1699 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1700 __ Fmov(temp_src, FPRegister(src));
1701 }
Artem Serov914d7a82017-02-07 14:33:49 +00001702 {
1703 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1704 __ stlr(temp_src, base);
1705 if (needs_null_check) {
1706 MaybeRecordImplicitNullCheck(instruction);
1707 }
1708 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001709 break;
1710 }
Aart Bik66c158e2018-01-31 12:55:04 -08001711 case DataType::Type::kUint32:
1712 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001713 case DataType::Type::kVoid:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001714 LOG(FATAL) << "Unreachable type " << type;
1715 }
1716}
1717
Calin Juravle175dc732015-08-25 15:42:32 +01001718void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1719 HInstruction* instruction,
1720 uint32_t dex_pc,
1721 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001722 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001723
Vladimir Markof6675082019-05-17 12:05:28 +01001724 ThreadOffset64 entrypoint_offset = GetThreadOffset<kArm64PointerSize>(entrypoint);
1725 // Reduce code size for AOT by using shared trampolines for slow path runtime calls across the
1726 // entire oat file. This adds an extra branch and we do not want to slow down the main path.
1727 // For JIT, thunk sharing is per-method, so the gains would be smaller or even negative.
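  // Roughly, the two call shapes are:
  //   main path / JIT:  ldr lr, [tr, #entrypoint_offset] ; blr lr
  //   AOT slow path:    bl <shared entrypoint thunk>      (resolved at link time)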
1728 if (slow_path == nullptr || Runtime::Current()->UseJitCompilation()) {
1729 __ Ldr(lr, MemOperand(tr, entrypoint_offset.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00001730 // Ensure the pc position is recorded immediately after the `blr` instruction.
1731 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1732 __ blr(lr);
1733 if (EntrypointRequiresStackMap(entrypoint)) {
1734 RecordPcInfo(instruction, dex_pc, slow_path);
1735 }
Vladimir Markof6675082019-05-17 12:05:28 +01001736 } else {
1737 // Ensure the pc position is recorded immediately after the `bl` instruction.
1738 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1739 EmitEntrypointThunkCall(entrypoint_offset);
1740 if (EntrypointRequiresStackMap(entrypoint)) {
1741 RecordPcInfo(instruction, dex_pc, slow_path);
1742 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001743 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001744}
1745
Roland Levillaindec8f632016-07-22 17:10:06 +01001746void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1747 HInstruction* instruction,
1748 SlowPathCode* slow_path) {
1749 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001750 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1751 __ Blr(lr);
1752}
1753
Alexandre Rames67555f72014-11-18 10:55:16 +00001754void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001755 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001756 UseScratchRegisterScope temps(GetVIXLAssembler());
1757 Register temp = temps.AcquireW();
Vladimir Markodc682aa2018-01-04 18:42:57 +00001758 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
1759 const size_t status_byte_offset =
1760 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
1761 constexpr uint32_t shifted_initialized_value =
1762 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
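  // The class status is stored in the most significant bits of the 32-bit status_ field, above
  // the SubtypeCheck bits, so (on little-endian) loading the single byte at status_byte_offset
  // and comparing it against the shifted kInitialized value is enough; lower values mean the
  // class is not initialized yet, hence the `lo` branch below.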
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001763
Serban Constantinescu02164b32014-11-13 14:05:07 +00001764 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001765 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Vladimir Markodc682aa2018-01-04 18:42:57 +00001766 __ Add(temp, class_reg, status_byte_offset);
Igor Murashkin86083f72017-10-27 10:59:04 -07001767 __ Ldarb(temp, HeapOperand(temp));
Vladimir Markodc682aa2018-01-04 18:42:57 +00001768 __ Cmp(temp, shifted_initialized_value);
Vladimir Marko2c64a832018-01-04 11:31:56 +00001769 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001770 __ Bind(slow_path->GetExitLabel());
1771}
Alexandre Rames5319def2014-10-23 10:03:10 +01001772
Vladimir Marko175e7862018-03-27 09:03:13 +00001773void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
1774 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
1775 uint32_t path_to_root = check->GetBitstringPathToRoot();
1776 uint32_t mask = check->GetBitstringMask();
1777 DCHECK(IsPowerOfTwo(mask + 1));
1778 size_t mask_bits = WhichPowerOf2(mask + 1);
1779
1780 if (mask_bits == 16u) {
1781 // Load only the bitstring part of the status word.
1782 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1783 } else {
1784 // /* uint32_t */ temp = temp->status_
1785 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1786 // Extract the bitstring bits.
1787 __ Ubfx(temp, temp, 0, mask_bits);
1788 }
1789 // Compare the bitstring bits to `path_to_root`.
1790 __ Cmp(temp, path_to_root);
1791}
1792
Roland Levillain44015862016-01-22 11:47:17 +00001793void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001794 BarrierType type = BarrierAll;
1795
1796 switch (kind) {
1797 case MemBarrierKind::kAnyAny:
1798 case MemBarrierKind::kAnyStore: {
1799 type = BarrierAll;
1800 break;
1801 }
1802 case MemBarrierKind::kLoadAny: {
1803 type = BarrierReads;
1804 break;
1805 }
1806 case MemBarrierKind::kStoreStore: {
1807 type = BarrierWrites;
1808 break;
1809 }
1810 default:
1811 LOG(FATAL) << "Unexpected memory barrier " << kind;
1812 }
1813 __ Dmb(InnerShareable, type);
1814}
1815
Serban Constantinescu02164b32014-11-13 14:05:07 +00001816void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1817 HBasicBlock* successor) {
1818 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001819 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1820 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001821 slow_path =
1822 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001823 instruction->SetSlowPath(slow_path);
1824 codegen_->AddSlowPath(slow_path);
1825 if (successor != nullptr) {
1826 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001827 }
1828 } else {
1829 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1830 }
1831
Serban Constantinescu02164b32014-11-13 14:05:07 +00001832 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1833 Register temp = temps.AcquireW();
1834
Andreas Gampe542451c2016-07-26 09:02:02 -07001835 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001836 if (successor == nullptr) {
1837 __ Cbnz(temp, slow_path->GetEntryLabel());
1838 __ Bind(slow_path->GetReturnLabel());
1839 } else {
1840 __ Cbz(temp, codegen_->GetLabelOf(successor));
1841 __ B(slow_path->GetEntryLabel());
1842 // slow_path will return to GetLabelOf(successor).
1843 }
1844}
1845
Alexandre Rames5319def2014-10-23 10:03:10 +01001846InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1847 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001848 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001849 assembler_(codegen->GetAssembler()),
1850 codegen_(codegen) {}
1851
Alexandre Rames67555f72014-11-18 10:55:16 +00001852void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001853 DCHECK_EQ(instr->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001854 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001855 DataType::Type type = instr->GetResultType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001856 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001857 case DataType::Type::kInt32:
1858 case DataType::Type::kInt64:
Alexandre Rames5319def2014-10-23 10:03:10 +01001859 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001860 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001861 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001862 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001863
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001864 case DataType::Type::kFloat32:
1865 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001866 locations->SetInAt(0, Location::RequiresFpuRegister());
1867 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001868 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001869 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001870
Alexandre Rames5319def2014-10-23 10:03:10 +01001871 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001872 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001873 }
1874}
1875
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001876void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
1877 const FieldInfo& field_info) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001878 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1879
1880 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001881 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Rames09a99962015-04-15 11:47:56 +01001882 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001883 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
1884 object_field_get_with_read_barrier
1885 ? LocationSummary::kCallOnSlowPath
1886 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001887 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001888 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko0ecac682018-08-07 10:40:38 +01001889 // We need a temporary register for the read barrier load in
1890 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
1891 // only if the field is volatile or the offset is too big.
1892 if (field_info.IsVolatile() ||
1893 field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
1894 locations->AddTemp(FixedTempLocation());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001895 }
Vladimir Marko70e97462016-08-09 11:04:26 +01001896 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001897 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001898 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001899 locations->SetOut(Location::RequiresFpuRegister());
1900 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001901 // The output overlaps for an object field get when read barriers
1902 // are enabled: we do not want the load to overwrite the object's
1903 // location, as we need it to emit the read barrier.
1904 locations->SetOut(
1905 Location::RequiresRegister(),
1906 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001907 }
1908}
1909
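// Code generation for instance and static field gets. There are three load paths:
// a Baker read barrier load for reference fields (which also covers the volatile
// case via load-acquire), LoadAcquire for other volatile fields, and a plain load
// otherwise. For reference fields under non-Baker read barrier configurations, a
// slow-path read barrier is emitted after the load.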
1910void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1911 const FieldInfo& field_info) {
1912 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001913 LocationSummary* locations = instruction->GetLocations();
1914 Location base_loc = locations->InAt(0);
1915 Location out = locations->Out();
1916 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Vladimir Marko61b92282017-10-11 13:23:17 +01001917 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
1918 DataType::Type load_type = instruction->GetType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001919 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001920
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001921 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
Vladimir Marko61b92282017-10-11 13:23:17 +01001922 load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00001923 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00001924 // /* HeapReference<Object> */ out = *(base + offset)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001925 Register base = RegisterFrom(base_loc, DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001926 Location maybe_temp =
1927 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
Roland Levillain44015862016-01-22 11:47:17 +00001928 // Note that potential implicit null checks are handled in this
1929 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1930 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1931 instruction,
1932 out,
1933 base,
1934 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001935 maybe_temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08001936 /* needs_null_check= */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001937 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001938 } else {
1939 // General case.
1940 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001941 // Note that a potential implicit null check is handled in this
1942 // CodeGeneratorARM64::LoadAcquire call.
1943 // NB: LoadAcquire will record the pc info if needed.
1944 codegen_->LoadAcquire(
Andreas Gampe3db70682018-12-26 15:12:03 -08001945 instruction, OutputCPURegister(instruction), field, /* needs_null_check= */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001946 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00001947 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1948 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Marko61b92282017-10-11 13:23:17 +01001949 codegen_->Load(load_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001950 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001951 }
Vladimir Marko61b92282017-10-11 13:23:17 +01001952 if (load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00001953 // If read barriers are enabled, emit read barriers other than
1954 // Baker's using a slow path (and also unpoison the loaded
1955 // reference, if heap poisoning is enabled).
1956 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1957 }
Roland Levillain4d027112015-07-01 15:41:14 +01001958 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001959}
1960
1961void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1962 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001963 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001964 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001965 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1966 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001967 } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001968 locations->SetInAt(1, Location::RequiresFpuRegister());
1969 } else {
1970 locations->SetInAt(1, Location::RequiresRegister());
1971 }
1972}
1973
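// Code generation for instance and static field sets: the value is poisoned first
// if it is a reference and heap poisoning is enabled, stored with StoreRelease when
// the field is volatile or with a plain store otherwise, and the GC card is marked
// afterwards when the stored value requires a write barrier.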
1974void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001975 const FieldInfo& field_info,
1976 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001977 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
1978
1979 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001980 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001981 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001982 Offset offset = field_info.GetFieldOffset();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001983 DataType::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001984
Roland Levillain4d027112015-07-01 15:41:14 +01001985 {
1986 // We use a block to end the scratch scope before the write barrier, thus
1987 // freeing the temporary registers so they can be used in `MarkGCCard`.
1988 UseScratchRegisterScope temps(GetVIXLAssembler());
1989
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001990 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01001991 DCHECK(value.IsW());
1992 Register temp = temps.AcquireW();
1993 __ Mov(temp, value.W());
1994 GetAssembler()->PoisonHeapReference(temp.W());
1995 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001996 }
Roland Levillain4d027112015-07-01 15:41:14 +01001997
1998 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00001999 codegen_->StoreRelease(
Andreas Gampe3db70682018-12-26 15:12:03 -08002000 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check= */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002001 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002002 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2003 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002004 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2005 codegen_->MaybeRecordImplicitNullCheck(instruction);
2006 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002007 }
2008
2009 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002010 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002011 }
2012}
2013
Alexandre Rames67555f72014-11-18 10:55:16 +00002014void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002015 DataType::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002016
2017 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002018 case DataType::Type::kInt32:
2019 case DataType::Type::kInt64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002020 Register dst = OutputRegister(instr);
2021 Register lhs = InputRegisterAt(instr, 0);
2022 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002023 if (instr->IsAdd()) {
2024 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002025 } else if (instr->IsAnd()) {
2026 __ And(dst, lhs, rhs);
2027 } else if (instr->IsOr()) {
2028 __ Orr(dst, lhs, rhs);
2029 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002030 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002031 } else if (instr->IsRor()) {
2032 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002033 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002034 __ Ror(dst, lhs, shift);
2035 } else {
2036 // Ensure the shift distance is in a register of the same size as the result. If
2037 // we are rotating a long and the shift distance arrives in a W register, we do
2038 // not need to sxtw it for use as an X register: the shift distance is always
2039 // taken modulo the register width (distance & (reg_bits - 1)).
2040 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2041 }
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002042 } else if (instr->IsMin() || instr->IsMax()) {
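// Min/Max are a compare followed by a conditional select: for Min, the signed `lt`
// condition picks `lhs` when `lhs < rhs` and `rhs` otherwise; Max uses `gt`.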
2043 __ Cmp(lhs, rhs);
2044 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
Alexandre Rames67555f72014-11-18 10:55:16 +00002045 } else {
2046 DCHECK(instr->IsXor());
2047 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002048 }
2049 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002050 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002051 case DataType::Type::kFloat32:
2052 case DataType::Type::kFloat64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002053 FPRegister dst = OutputFPRegister(instr);
2054 FPRegister lhs = InputFPRegisterAt(instr, 0);
2055 FPRegister rhs = InputFPRegisterAt(instr, 1);
2056 if (instr->IsAdd()) {
2057 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002058 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002059 __ Fsub(dst, lhs, rhs);
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002060 } else if (instr->IsMin()) {
2061 __ Fmin(dst, lhs, rhs);
2062 } else if (instr->IsMax()) {
2063 __ Fmax(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002064 } else {
2065 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002066 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002067 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002068 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002069 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002070 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002071 }
2072}
2073
Serban Constantinescu02164b32014-11-13 14:05:07 +00002074void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2075 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2076
Vladimir Markoca6fff82017-10-03 14:49:14 +01002077 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002078 DataType::Type type = instr->GetResultType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002079 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002080 case DataType::Type::kInt32:
2081 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002082 locations->SetInAt(0, Location::RequiresRegister());
2083 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002084 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002085 break;
2086 }
2087 default:
2088 LOG(FATAL) << "Unexpected shift type " << type;
2089 }
2090}
2091
2092void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2093 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2094
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002095 DataType::Type type = instr->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002096 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002097 case DataType::Type::kInt32:
2098 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002099 Register dst = OutputRegister(instr);
2100 Register lhs = InputRegisterAt(instr, 0);
2101 Operand rhs = InputOperandAt(instr, 1);
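// Java shift semantics use only the low 5 (int) or 6 (long) bits of the distance.
// Immediates are masked explicitly below; for the register variant, the AArch64
// variable shift instructions already take the distance modulo the register size.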
2102 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002103 uint32_t shift_value = rhs.GetImmediate() &
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002104 (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002105 if (instr->IsShl()) {
2106 __ Lsl(dst, lhs, shift_value);
2107 } else if (instr->IsShr()) {
2108 __ Asr(dst, lhs, shift_value);
2109 } else {
2110 __ Lsr(dst, lhs, shift_value);
2111 }
2112 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002113 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002114
2115 if (instr->IsShl()) {
2116 __ Lsl(dst, lhs, rhs_reg);
2117 } else if (instr->IsShr()) {
2118 __ Asr(dst, lhs, rhs_reg);
2119 } else {
2120 __ Lsr(dst, lhs, rhs_reg);
2121 }
2122 }
2123 break;
2124 }
2125 default:
2126 LOG(FATAL) << "Unexpected shift operation type " << type;
2127 }
2128}
2129
Alexandre Rames5319def2014-10-23 10:03:10 +01002130void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002131 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002132}
2133
2134void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002135 HandleBinaryOp(instruction);
2136}
2137
2138void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2139 HandleBinaryOp(instruction);
2140}
2141
2142void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2143 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002144}
2145
Artem Serov7fc63502016-02-09 17:15:29 +00002146void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002147 DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002148 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002149 locations->SetInAt(0, Location::RequiresRegister());
2150 // There is no immediate variant of negated bitwise instructions in AArch64.
2151 locations->SetInAt(1, Location::RequiresRegister());
2152 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2153}
2154
Artem Serov7fc63502016-02-09 17:15:29 +00002155void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002156 Register dst = OutputRegister(instr);
2157 Register lhs = InputRegisterAt(instr, 0);
2158 Register rhs = InputRegisterAt(instr, 1);
2159
2160 switch (instr->GetOpKind()) {
2161 case HInstruction::kAnd:
2162 __ Bic(dst, lhs, rhs);
2163 break;
2164 case HInstruction::kOr:
2165 __ Orn(dst, lhs, rhs);
2166 break;
2167 case HInstruction::kXor:
2168 __ Eon(dst, lhs, rhs);
2169 break;
2170 default:
2171 LOG(FATAL) << "Unreachable";
2172 }
2173}
2174
Anton Kirilov74234da2017-01-13 14:42:47 +00002175void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2176 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002177 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
2178 instruction->GetType() == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002179 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002180 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames8626b742015-11-25 16:28:08 +00002181 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2182 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2183 } else {
2184 locations->SetInAt(0, Location::RequiresRegister());
2185 }
2186 locations->SetInAt(1, Location::RequiresRegister());
2187 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2188}
2189
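// Emits a single data-processing instruction whose second operand carries the merged
// shift or extension, e.g. an HAdd whose right input was a constant HShl becomes
// `add x0, x1, x2, lsl #n` (registers here are illustrative).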
Anton Kirilov74234da2017-01-13 14:42:47 +00002190void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2191 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002192 DataType::Type type = instruction->GetType();
Alexandre Rames8626b742015-11-25 16:28:08 +00002193 HInstruction::InstructionKind kind = instruction->GetInstrKind();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002194 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002195 Register out = OutputRegister(instruction);
2196 Register left;
2197 if (kind != HInstruction::kNeg) {
2198 left = InputRegisterAt(instruction, 0);
2199 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002200 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002201 // shifter operand operation, the IR generating `right_reg` (input to the type
2202 // conversion) can have a different type from the current instruction's type,
2203 // so we manually indicate the type.
2204 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002205 Operand right_operand(0);
2206
Anton Kirilov74234da2017-01-13 14:42:47 +00002207 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2208 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002209 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2210 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002211 right_operand = Operand(right_reg,
2212 helpers::ShiftFromOpKind(op_kind),
2213 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002214 }
2215
2216 // Logical binary operations do not support extension operations in the
2217 // operand. Note that VIXL would still manage if one were passed, by generating
2218 // the extension as a separate instruction.
2219 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2220 DCHECK(!right_operand.IsExtendedRegister() ||
2221 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2222 kind != HInstruction::kNeg));
2223 switch (kind) {
2224 case HInstruction::kAdd:
2225 __ Add(out, left, right_operand);
2226 break;
2227 case HInstruction::kAnd:
2228 __ And(out, left, right_operand);
2229 break;
2230 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002231 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002232 __ Neg(out, right_operand);
2233 break;
2234 case HInstruction::kOr:
2235 __ Orr(out, left, right_operand);
2236 break;
2237 case HInstruction::kSub:
2238 __ Sub(out, left, right_operand);
2239 break;
2240 case HInstruction::kXor:
2241 __ Eor(out, left, right_operand);
2242 break;
2243 default:
2244 LOG(FATAL) << "Unexpected operation kind: " << kind;
2245 UNREACHABLE();
2246 }
2247}
2248
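// HIntermediateAddress holds `array base + data offset`, factored out of array
// accesses by `TryExtractArrayAccessAddress()` so that the accesses themselves only
// need to apply the (scaled) index.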
Artem Serov328429f2016-07-06 16:23:04 +01002249void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002250 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002251 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002252 locations->SetInAt(0, Location::RequiresRegister());
2253 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002254 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002255}
2256
Roland Levillain19c54192016-11-04 13:44:09 +00002257void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002258 __ Add(OutputRegister(instruction),
2259 InputRegisterAt(instruction, 0),
2260 Operand(InputOperandAt(instruction, 1)));
2261}
2262
Artem Serove1811ed2017-04-27 16:50:47 +01002263void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
2264 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002265 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serove1811ed2017-04-27 16:50:47 +01002266
2267 HIntConstant* shift = instruction->GetShift()->AsIntConstant();
2268
2269 locations->SetInAt(0, Location::RequiresRegister());
2270 // For the byte case we don't need to shift the index variable, so we can encode the data
2271 // offset into the ADD instruction. For other cases we prefer the data offset to be in a
2272 // register; that hoists the data offset constant generation out of the loop and reduces
2273 // the critical path length in the loop.
2274 locations->SetInAt(1, shift->GetValue() == 0
2275 ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
2276 : Location::RequiresRegister());
2277 locations->SetInAt(2, Location::ConstantLocation(shift));
2278 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2279}
2280
2281void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
2282 HIntermediateAddressIndex* instruction) {
2283 Register index_reg = InputRegisterAt(instruction, 0);
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002284 uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
Artem Serove1811ed2017-04-27 16:50:47 +01002285 uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();
2286
2287 if (shift == 0) {
2288 __ Add(OutputRegister(instruction), index_reg, offset);
2289 } else {
2290 Register offset_reg = InputRegisterAt(instruction, 1);
2291 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
2292 }
2293}
2294
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002295void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002296 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002297 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002298 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2299 if (instr->GetOpKind() == HInstruction::kSub &&
2300 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002301 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002302 // Don't allocate a register for the Mneg instruction.
2303 } else {
2304 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2305 Location::RequiresRegister());
2306 }
2307 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2308 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002309 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2310}
2311
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002312void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002313 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002314 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2315 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002316
2317 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2318 // This fixup should be carried out for all multiply-accumulate instructions:
2319 // madd, msub, smaddl, smsubl, umaddl and umsubl.
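// The erratum can be triggered when a 64-bit multiply-accumulate directly follows a
// load or store; the nop emitted below breaks up such a sequence.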
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002320 if (instr->GetType() == DataType::Type::kInt64 &&
Alexandre Rames418318f2015-11-20 15:55:47 +00002321 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2322 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002323 vixl::aarch64::Instruction* prev =
2324 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002325 if (prev->IsLoadOrStore()) {
2326 // Make sure we emit only exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002327 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002328 __ nop();
2329 }
2330 }
2331
2332 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002333 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002334 __ Madd(res, mul_left, mul_right, accumulator);
2335 } else {
2336 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002337 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002338 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002339 __ Mneg(res, mul_left, mul_right);
2340 } else {
2341 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2342 __ Msub(res, mul_left, mul_right, accumulator);
2343 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002344 }
2345}
2346
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002347void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002348 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002349 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002350 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002351 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2352 object_array_get_with_read_barrier
2353 ? LocationSummary::kCallOnSlowPath
2354 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002355 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002356 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002357 if (instruction->GetIndex()->IsConstant()) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002358 // Array loads with constant index are treated as field loads.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002359 // We need a temporary register for the read barrier load in
2360 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
2361 // only if the offset is too big.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002362 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
2363 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002364 offset += index << DataType::SizeShift(DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002365 if (offset >= kReferenceLoadMinFarOffset) {
2366 locations->AddTemp(FixedTempLocation());
2367 }
Artem Serov0806f582018-10-11 20:14:20 +01002368 } else if (!instruction->GetArray()->IsIntermediateAddress()) {
Vladimir Marko008e09f32018-08-06 15:42:43 +01002369 // We need a non-scratch temporary for the array data pointer in
Artem Serov0806f582018-10-11 20:14:20 +01002370 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier() for the case with no
2371 // intermediate address.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002372 locations->AddTemp(Location::RequiresRegister());
2373 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002374 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002375 locations->SetInAt(0, Location::RequiresRegister());
2376 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002377 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002378 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2379 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002380 // The output overlaps in the case of an object array get with
2381 // read barriers enabled: we do not want the move to overwrite the
2382 // array's location, as we need it to emit the read barrier.
2383 locations->SetOut(
2384 Location::RequiresRegister(),
2385 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002386 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002387}
2388
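// Code generation for array loads. Reference loads with Baker read barriers use a
// field-load style sequence for a constant index and a dedicated array-load sequence
// otherwise. All other loads use the general path, which also handles compressed
// String.charAt(), reuses an HIntermediateAddress base when one is present, and emits
// a slow-path read barrier for references under non-Baker configurations.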
2389void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002390 DataType::Type type = instruction->GetType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002391 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002392 LocationSummary* locations = instruction->GetLocations();
2393 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002394 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002395 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002396 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2397 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002398 MacroAssembler* masm = GetVIXLAssembler();
2399 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002400
Artem Serov0806f582018-10-11 20:14:20 +01002401 // The non-Baker read barrier instrumentation of object ArrayGet instructions
Roland Levillain19c54192016-11-04 13:44:09 +00002402 // does not support the HIntermediateAddress instruction.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002403 DCHECK(!((type == DataType::Type::kReference) &&
Roland Levillain19c54192016-11-04 13:44:09 +00002404 instruction->GetArray()->IsIntermediateAddress() &&
Artem Serov0806f582018-10-11 20:14:20 +01002405 kEmitCompilerReadBarrier &&
2406 !kUseBakerReadBarrier));
Roland Levillain19c54192016-11-04 13:44:09 +00002407
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002408 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00002409 // Object ArrayGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002410 // Note that a potential implicit null check is handled in the
2411 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Marko66d691d2017-04-07 17:53:39 +01002412 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002413 if (index.IsConstant()) {
Artem Serov0806f582018-10-11 20:14:20 +01002414 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002415 // Array load with a constant index can be treated as a field load.
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002416 offset += Int64FromLocation(index) << DataType::SizeShift(type);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002417 Location maybe_temp =
2418 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
2419 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2420 out,
2421 obj.W(),
2422 offset,
2423 maybe_temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08002424 /* needs_null_check= */ false,
2425 /* use_load_acquire= */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002426 } else {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002427 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08002428 instruction, out, obj.W(), offset, index, /* needs_null_check= */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002429 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002430 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002431 // General case.
2432 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002433 Register length;
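// For String.charAt() with string compression enabled, the count field is loaded
// first: its least significant bit records whether the characters are stored as
// 8-bit (compressed) or 16-bit (uncompressed) units, selecting between the Ldrb
// and Ldrh emitted below.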
2434 if (maybe_compressed_char_at) {
2435 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2436 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002437 {
2438 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2439 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2440
2441 if (instruction->GetArray()->IsIntermediateAddress()) {
2442 DCHECK_LT(count_offset, offset);
2443 int64_t adjusted_offset =
2444 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2445 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2446 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2447 } else {
2448 __ Ldr(length, HeapOperand(obj, count_offset));
2449 }
2450 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002451 }
jessicahandojo05765752016-09-09 19:01:32 -07002452 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002453 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002454 if (maybe_compressed_char_at) {
2455 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002456 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2457 "Expecting 0=compressed, 1=uncompressed");
2458 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002459 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002460 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002461 __ B(&done);
2462 __ Bind(&uncompressed_load);
2463 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002464 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002465 __ Bind(&done);
2466 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002467 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002468 source = HeapOperand(obj, offset);
2469 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002470 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002471 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002472 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002473 // We do not need to compute the intermediate address from the array: the
2474 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002475 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002476 if (kIsDebugBuild) {
Artem Serov0806f582018-10-11 20:14:20 +01002477 HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
2478 DCHECK_EQ(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
Roland Levillain44015862016-01-22 11:47:17 +00002479 }
2480 temp = obj;
2481 } else {
2482 __ Add(temp, obj, offset);
2483 }
jessicahandojo05765752016-09-09 19:01:32 -07002484 if (maybe_compressed_char_at) {
2485 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002486 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2487 "Expecting 0=compressed, 1=uncompressed");
2488 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002489 __ Ldrb(Register(OutputCPURegister(instruction)),
2490 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2491 __ B(&done);
2492 __ Bind(&uncompressed_load);
2493 __ Ldrh(Register(OutputCPURegister(instruction)),
2494 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2495 __ Bind(&done);
2496 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002497 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002498 }
Roland Levillain44015862016-01-22 11:47:17 +00002499 }
jessicahandojo05765752016-09-09 19:01:32 -07002500 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002501 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2502 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002503 codegen_->Load(type, OutputCPURegister(instruction), source);
2504 codegen_->MaybeRecordImplicitNullCheck(instruction);
2505 }
Roland Levillain44015862016-01-22 11:47:17 +00002506
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002507 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002508 static_assert(
2509 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2510 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2511 Location obj_loc = locations->InAt(0);
2512 if (index.IsConstant()) {
2513 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2514 } else {
2515 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2516 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002517 }
Roland Levillain4d027112015-07-01 15:41:14 +01002518 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002519}
2520
Alexandre Rames5319def2014-10-23 10:03:10 +01002521void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002522 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002523 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002524 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002525}
2526
2527void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002528 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002529 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002530 {
2531 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2532 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2533 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2534 codegen_->MaybeRecordImplicitNullCheck(instruction);
2535 }
jessicahandojo05765752016-09-09 19:01:32 -07002536 // Mask out compression flag from String's array length.
2537 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
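// The count field stores (length << 1) | compression_flag, so a logical shift right
// by one yields the character count.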
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002538 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002539 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002540}
2541
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002542void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002543 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002544
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002545 bool needs_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002546 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002547 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002548 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002549 locations->SetInAt(0, Location::RequiresRegister());
2550 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002551 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2552 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002553 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002554 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002555 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002556 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002557 }
2558}
2559
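// Code generation for array stores. Stores that need no write barrier are emitted as
// a plain store (possibly through an HIntermediateAddress base). Otherwise, a null
// value branches straight to the store; a needed type check compares the value's
// class with the array's component type and falls back to the ArraySet slow path when
// they do not trivially match, the GC card is marked, and the (possibly poisoned)
// reference is stored.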
2560void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002561 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002562 LocationSummary* locations = instruction->GetLocations();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002563 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002564 bool needs_write_barrier =
2565 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002566
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002567 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002568 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002569 CPURegister source = value;
2570 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002571 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002572 MemOperand destination = HeapOperand(array);
2573 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002574
2575 if (!needs_write_barrier) {
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002576 DCHECK(!needs_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002577 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002578 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002579 destination = HeapOperand(array, offset);
2580 } else {
2581 UseScratchRegisterScope temps(masm);
2582 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002583 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002584 // We do not need to compute the intermediate address from the array: the
2585 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002586 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002587 if (kIsDebugBuild) {
Artem Serov0806f582018-10-11 20:14:20 +01002588 HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
2589 DCHECK(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002590 }
2591 temp = array;
2592 } else {
2593 __ Add(temp, array, offset);
2594 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002595 destination = HeapOperand(temp,
2596 XRegisterFrom(index),
2597 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002598 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002599 }
Artem Serov914d7a82017-02-07 14:33:49 +00002600 {
2601 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2602 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2603 codegen_->Store(value_type, value, destination);
2604 codegen_->MaybeRecordImplicitNullCheck(instruction);
2605 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002606 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002607 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002608
2609 bool can_value_be_null = instruction->GetValueCanBeNull();
2610 vixl::aarch64::Label do_store;
2611 if (can_value_be_null) {
2612 __ Cbz(Register(value), &do_store);
2613 }
2614
Vladimir Marko0dda8c82019-05-16 12:47:40 +00002615 SlowPathCodeARM64* slow_path = nullptr;
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002616 if (needs_type_check) {
2617 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
2618 codegen_->AddSlowPath(slow_path);
2619
2620 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2621 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2622 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2623
Alexandre Rames97833a02015-04-16 15:07:12 +01002624 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002625 Register temp = temps.AcquireSameSizeAs(array);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002626 Register temp2 = temps.AcquireSameSizeAs(array);
2627
2628 // Note that when Baker read barriers are enabled, the type
2629 // checks are performed without read barriers. This is fine,
2630 // even in the case where a class object is in the from-space
2631 // after the flip, as a comparison involving such a type would
2632 // not produce a false positive; it may of course produce a
2633 // false negative, in which case we would take the ArraySet
2634 // slow path.
2635
2636 // /* HeapReference<Class> */ temp = array->klass_
2637 {
2638 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2639 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2640 __ Ldr(temp, HeapOperand(array, class_offset));
2641 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames97833a02015-04-16 15:07:12 +01002642 }
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002643 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Alexandre Rames97833a02015-04-16 15:07:12 +01002644
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002645 // /* HeapReference<Class> */ temp = temp->component_type_
2646 __ Ldr(temp, HeapOperand(temp, component_offset));
2647 // /* HeapReference<Class> */ temp2 = value->klass_
2648 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2649 // If heap poisoning is enabled, no need to unpoison `temp`
2650 // nor `temp2`, as we are comparing two poisoned references.
2651 __ Cmp(temp, temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002652
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002653 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2654 vixl::aarch64::Label do_put;
2655 __ B(eq, &do_put);
2656 // If heap poisoning is enabled, the `temp` reference has
2657 // not been unpoisoned yet; unpoison it now.
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002658 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002659
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002660 // /* HeapReference<Class> */ temp = temp->super_class_
2661 __ Ldr(temp, HeapOperand(temp, super_offset));
2662 // If heap poisoning is enabled, no need to unpoison
2663 // `temp`, as we are comparing against null below.
2664 __ Cbnz(temp, slow_path->GetEntryLabel());
2665 __ Bind(&do_put);
Vladimir Markod1ef8732017-04-18 13:55:13 +01002666 } else {
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002667 __ B(ne, slow_path->GetEntryLabel());
Vladimir Marko0dda8c82019-05-16 12:47:40 +00002668 }
2669 }
2670
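// Passing `value_can_be_null=false` is correct here: either the value is statically
// known to be non-null, or the Cbz above already branched to `do_store`, skipping
// this card mark.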
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002671 codegen_->MarkGCCard(array, value.W(), /* value_can_be_null= */ false);
Vladimir Marko0dda8c82019-05-16 12:47:40 +00002672
Vladimir Marko8fa839c2019-05-16 12:50:47 +00002673 if (can_value_be_null) {
2674 DCHECK(do_store.IsLinked());
2675 __ Bind(&do_store);
2676 }
2677
2678 UseScratchRegisterScope temps(masm);
2679 if (kPoisonHeapReferences) {
2680 Register temp_source = temps.AcquireSameSizeAs(array);
2681 DCHECK(value.IsW());
2682 __ Mov(temp_source, value.W());
2683 GetAssembler()->PoisonHeapReference(temp_source);
2684 source = temp_source;
2685 }
2686
2687 if (index.IsConstant()) {
2688 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
2689 destination = HeapOperand(array, offset);
2690 } else {
2691 Register temp_base = temps.AcquireSameSizeAs(array);
2692 __ Add(temp_base, array, offset);
2693 destination = HeapOperand(temp_base,
2694 XRegisterFrom(index),
2695 LSL,
2696 DataType::SizeShift(value_type));
2697 }
2698
2699 {
2700 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2701 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2702 __ Str(source, destination);
2703
2704 if (can_value_be_null || !needs_type_check) {
2705 codegen_->MaybeRecordImplicitNullCheck(instruction);
2706 }
Vladimir Marko0dda8c82019-05-16 12:47:40 +00002707 }
2708
2709 if (slow_path != nullptr) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002710 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002711 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002712 }
2713}
2714
Alexandre Rames67555f72014-11-18 10:55:16 +00002715void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002716 RegisterSet caller_saves = RegisterSet::Empty();
2717 InvokeRuntimeCallingConvention calling_convention;
2718 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2719 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2720 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002721 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002722 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002723}
2724
2725void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002726 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002727 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002728 codegen_->AddSlowPath(slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00002729 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
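// A single unsigned comparison covers both failure modes: `hs` (unsigned >=) is taken
// when the index is greater than or equal to the length, and also when the index is
// negative, as it then appears as a large unsigned value.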
2730 __ B(slow_path->GetEntryLabel(), hs);
2731}
2732
Alexandre Rames67555f72014-11-18 10:55:16 +00002733void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2734 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002735 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00002736 locations->SetInAt(0, Location::RequiresRegister());
2737 if (check->HasUses()) {
2738 locations->SetOut(Location::SameAsFirstInput());
2739 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01002740 // Rely on the type initialization to save everything we need.
2741 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexandre Rames67555f72014-11-18 10:55:16 +00002742}
2743
2744void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2745 // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01002746 SlowPathCodeARM64* slow_path =
2747 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
Alexandre Rames67555f72014-11-18 10:55:16 +00002748 codegen_->AddSlowPath(slow_path);
2749 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2750}
2751
Roland Levillain1a653882016-03-18 18:05:57 +00002752static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2753 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2754 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2755}
2756
2757void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2758 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2759 Location rhs_loc = instruction->GetLocations()->InAt(1);
2760 if (rhs_loc.IsConstant()) {
2761 // 0.0 is the only immediate that can be encoded directly in
2762 // an FCMP instruction.
2763 //
2764 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2765 // specify that in a floating-point comparison, positive zero
2766 // and negative zero are considered equal, so we can use the
2767 // literal 0.0 for both cases here.
2768 //
2769 // Note however that some methods (Float.equal, Float.compare,
2770 // Float.compareTo, Double.equal, Double.compare,
2771 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2772 // StrictMath.min) consider 0.0 to be (strictly) greater than
2773 // -0.0. So if we ever translate calls to these methods into a
2774 // HCompare instruction, we must handle the -0.0 case with
2775 // care here.
2776 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2777 __ Fcmp(lhs_reg, 0.0);
2778 } else {
2779 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2780 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002781}
2782
Serban Constantinescu02164b32014-11-13 14:05:07 +00002783void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002784 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002785 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002786 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002787 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002788 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002789 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002790 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002791 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002792 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002793 case DataType::Type::kInt32:
2794 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002795 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002796 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002797 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2798 break;
2799 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002800 case DataType::Type::kFloat32:
2801 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002802 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002803 locations->SetInAt(1,
2804 IsFloatingPointZeroConstant(compare->InputAt(1))
2805 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2806 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002807 locations->SetOut(Location::RequiresRegister());
2808 break;
2809 }
2810 default:
2811 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2812 }
2813}
2814
2815void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002816 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002817
2818 // 0 if: left == right
2819 // 1 if: left > right
2820 // -1 if: left < right
2821 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002822 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002823 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002824 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002825 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002826 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002827 case DataType::Type::kInt32:
2828 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002829 Register result = OutputRegister(compare);
2830 Register left = InputRegisterAt(compare, 0);
2831 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002832 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002833 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2834 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002835 break;
2836 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002837 case DataType::Type::kFloat32:
2838 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002839 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002840 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002841 __ Cset(result, ne);
2842 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002843 break;
2844 }
2845 default:
2846 LOG(FATAL) << "Unimplemented compare type " << in_type;
2847 }
2848}
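// Example of the integer lowering above, with the left operand in w0, the right
// operand in w1 and the result in w2 (register names illustrative):
//   cmp  w0, w1
//   cset w2, ne      // w2 = (left != right) ? 1 : 0
//   cneg w2, w2, lt  // w2 = (left < right) ? -w2 : w2, i.e. -1, 0 or +1
// The floating-point path emits the same Cset/Cneg pair after GenerateFcmp, with
// the Cneg condition adjusted for the compare's gt/lt bias on NaN.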
2849
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002850void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002851 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002852
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002853 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002854 locations->SetInAt(0, Location::RequiresFpuRegister());
2855 locations->SetInAt(1,
2856 IsFloatingPointZeroConstant(instruction->InputAt(1))
2857 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2858 : Location::RequiresFpuRegister());
2859 } else {
2860 // Integer cases.
2861 locations->SetInAt(0, Location::RequiresRegister());
2862 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2863 }
2864
David Brazdilb3e773e2016-01-26 11:28:37 +00002865 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002866 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002867 }
2868}
2869
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002870void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002871 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002872 return;
2873 }
2874
2875 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002876 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002877 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002878
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002879 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002880 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002881 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002882 } else {
2883 // Integer cases.
2884 Register lhs = InputRegisterAt(instruction, 0);
2885 Operand rhs = InputOperandAt(instruction, 1);
2886 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002887 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002888 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002889}
2890
2891#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2892 M(Equal) \
2893 M(NotEqual) \
2894 M(LessThan) \
2895 M(LessThanOrEqual) \
2896 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002897 M(GreaterThanOrEqual) \
2898 M(Below) \
2899 M(BelowOrEqual) \
2900 M(Above) \
2901 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002902#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002903void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2904void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002905FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002906#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002907#undef FOR_EACH_CONDITION_INSTRUCTION
2908
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002909void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002910 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002911 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002912 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
2913
2914 Register out = OutputRegister(instruction);
2915 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01002916
2917 if (abs_imm == 2) {
2918 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
2919 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
2920 } else {
2921 UseScratchRegisterScope temps(GetVIXLAssembler());
2922 Register temp = temps.AcquireSameSizeAs(out);
2923 __ Add(temp, dividend, abs_imm - 1);
2924 __ Cmp(dividend, 0);
2925 __ Csel(out, temp, dividend, lt);
2926 }
2927
Zheng Xuc6667102015-05-15 16:08:45 +08002928 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002929 if (imm > 0) {
2930 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08002931 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002932 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002933 }
2934}
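// Worked example for the general power-of-two path above, with imm == 4
// (ctz_imm == 2) and dividend == -7: temp = -7 + 3 = -4; the Csel picks temp
// because the dividend is negative; -4 >> 2 == -1, matching Java's truncation
// towards zero (-7 / 4 == -1). For a non-negative dividend the Csel keeps the
// dividend unchanged and the arithmetic shift alone yields the quotient; a
// negative divisor only flips the sign via the final Neg with a shifted operand.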
2935
2936void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2937 DCHECK(instruction->IsDiv() || instruction->IsRem());
2938
2939 LocationSummary* locations = instruction->GetLocations();
2940 Location second = locations->InAt(1);
2941 DCHECK(second.IsConstant());
2942
2943 Register out = OutputRegister(instruction);
2944 Register dividend = InputRegisterAt(instruction, 0);
2945 int64_t imm = Int64FromConstant(second.GetConstant());
2946
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002947 DataType::Type type = instruction->GetResultType();
2948 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08002949
2950 int64_t magic;
2951 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002952 CalculateMagicAndShiftForDivRem(
Andreas Gampe3db70682018-12-26 15:12:03 -08002953 imm, /* is_long= */ type == DataType::Type::kInt64, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08002954
2955 UseScratchRegisterScope temps(GetVIXLAssembler());
2956 Register temp = temps.AcquireSameSizeAs(out);
2957
2958 // temp = get_high(dividend * magic)
2959 __ Mov(temp, magic);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002960 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08002961 __ Smulh(temp, dividend, temp);
2962 } else {
2963 __ Smull(temp.X(), dividend, temp);
2964 __ Lsr(temp.X(), temp.X(), 32);
2965 }
2966
2967 if (imm > 0 && magic < 0) {
2968 __ Add(temp, temp, dividend);
2969 } else if (imm < 0 && magic > 0) {
2970 __ Sub(temp, temp, dividend);
2971 }
2972
2973 if (shift != 0) {
2974 __ Asr(temp, temp, shift);
2975 }
2976
2977 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002978 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08002979 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002980 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08002981 // TODO: Strength reduction for msub.
2982 Register temp_imm = temps.AcquireSameSizeAs(out);
2983 __ Mov(temp_imm, imm);
2984 __ Msub(out, temp, temp_imm, dividend);
2985 }
2986}
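// The sequence above is the standard multiply-by-magic-number division (see
// Hacker's Delight, chapter 10): take the high half of dividend * magic, add or
// subtract the dividend when the magic constant and the divisor have opposite
// signs, shift right arithmetically, then round towards zero by subtracting the
// sign term (temp >> 31, or >> 63 for int64). For a remainder the quotient is
// folded back with Msub: rem = dividend - quotient * imm.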
2987
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002988void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002989 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08002990
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002991 if (imm == 0) {
2992 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
2993 return;
2994 }
Zheng Xuc6667102015-05-15 16:08:45 +08002995
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002996 if (IsPowerOfTwo(AbsOrMin(imm))) {
2997 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08002998 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002999 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
3000 DCHECK(imm < -2 || imm > 2) << imm;
3001 GenerateDivRemWithAnyConstant(instruction);
3002 }
3003}
3004
3005void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
3006 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
3007 << instruction->GetResultType();
3008
3009 if (instruction->GetLocations()->InAt(1).IsConstant()) {
3010 GenerateIntDivForConstDenom(instruction);
3011 } else {
3012 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003013 Register dividend = InputRegisterAt(instruction, 0);
3014 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003015 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08003016 }
3017}
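// Note on the non-constant path: AArch64 SDIV does not trap; a zero divisor
// simply produces zero. The Java requirement to throw ArithmeticException is
// therefore implemented separately by HDivZeroCheck (see VisitDivZeroCheck
// below), not by the division instruction itself.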
3018
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003019void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3020 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003021 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003022 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003023 case DataType::Type::kInt32:
3024 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003025 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003026 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003027 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3028 break;
3029
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003030 case DataType::Type::kFloat32:
3031 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003032 locations->SetInAt(0, Location::RequiresFpuRegister());
3033 locations->SetInAt(1, Location::RequiresFpuRegister());
3034 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3035 break;
3036
3037 default:
3038 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3039 }
3040}
3041
3042void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003043 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003044 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003045 case DataType::Type::kInt32:
3046 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003047 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003048 break;
3049
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003050 case DataType::Type::kFloat32:
3051 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003052 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3053 break;
3054
3055 default:
3056 LOG(FATAL) << "Unexpected div type " << type;
3057 }
3058}
3059
Alexandre Rames67555f72014-11-18 10:55:16 +00003060void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003061 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003062 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003063}
3064
3065void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3066 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003067 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003068 codegen_->AddSlowPath(slow_path);
3069 Location value = instruction->GetLocations()->InAt(0);
3070
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003071 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003072
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003073 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003074 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Elliott Hughesc1896c92018-11-29 11:33:18 -08003075 UNREACHABLE();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003076 }
3077
Alexandre Rames67555f72014-11-18 10:55:16 +00003078 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003079 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003080 if (divisor == 0) {
3081 __ B(slow_path->GetEntryLabel());
3082 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003083      // A division by a non-zero constant is valid. We don't need to perform
3084 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003085 }
3086 } else {
3087 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3088 }
3089}
3090
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003091void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3092 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003093 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003094 locations->SetOut(Location::ConstantLocation(constant));
3095}
3096
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003097void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3098 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003099 // Will be generated at use site.
3100}
3101
Alexandre Rames5319def2014-10-23 10:03:10 +01003102void LocationsBuilderARM64::VisitExit(HExit* exit) {
3103 exit->SetLocations(nullptr);
3104}
3105
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003106void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003107}
3108
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003109void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3110 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003111 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003112 locations->SetOut(Location::ConstantLocation(constant));
3113}
3114
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003115void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003116 // Will be generated at use site.
3117}
3118
David Brazdilfc6a86a2015-06-26 10:33:45 +00003119void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003120 if (successor->IsExitBlock()) {
3121 DCHECK(got->GetPrevious()->AlwaysThrows());
3122 return; // no code needed
3123 }
3124
Serban Constantinescu02164b32014-11-13 14:05:07 +00003125 HBasicBlock* block = got->GetBlock();
3126 HInstruction* previous = got->GetPrevious();
3127 HLoopInformation* info = block->GetLoopInformation();
3128
David Brazdil46e2a392015-03-16 17:31:52 +00003129 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003130 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
3131 UseScratchRegisterScope temps(GetVIXLAssembler());
3132 Register temp1 = temps.AcquireX();
3133 Register temp2 = temps.AcquireX();
3134 __ Ldr(temp1, MemOperand(sp, 0));
3135 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3136 __ Add(temp2, temp2, 1);
3137 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3138 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003139 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3140 return;
3141 }
3142 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3143 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Andreas Gampe3db70682018-12-26 15:12:03 -08003144 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003145 }
3146 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003147 __ B(codegen_->GetLabelOf(successor));
3148 }
3149}
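// The hotness update above reloads the current ArtMethod* from its stack slot at
// [sp, #0], bumps the 16-bit hotness counter with a ldrh/add/strh sequence, and
// only then emits the suspend check, so back edges are counted before any
// potential suspension.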
3150
David Brazdilfc6a86a2015-06-26 10:33:45 +00003151void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3152 got->SetLocations(nullptr);
3153}
3154
3155void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3156 HandleGoto(got, got->GetSuccessor());
3157}
3158
3159void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3160 try_boundary->SetLocations(nullptr);
3161}
3162
3163void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3164 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3165 if (!successor->IsExitBlock()) {
3166 HandleGoto(try_boundary, successor);
3167 }
3168}
3169
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003170void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003171 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003172 vixl::aarch64::Label* true_target,
3173 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003174 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003175
David Brazdil0debae72015-11-12 18:37:00 +00003176 if (true_target == nullptr && false_target == nullptr) {
3177 // Nothing to do. The code always falls through.
3178 return;
3179 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003180 // Constant condition, statically compared against "true" (integer value 1).
3181 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003182 if (true_target != nullptr) {
3183 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003184 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003185 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003186 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003187 if (false_target != nullptr) {
3188 __ B(false_target);
3189 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003190 }
David Brazdil0debae72015-11-12 18:37:00 +00003191 return;
3192 }
3193
3194 // The following code generates these patterns:
3195 // (1) true_target == nullptr && false_target != nullptr
3196 // - opposite condition true => branch to false_target
3197 // (2) true_target != nullptr && false_target == nullptr
3198 // - condition true => branch to true_target
3199 // (3) true_target != nullptr && false_target != nullptr
3200 // - condition true => branch to true_target
3201 // - branch to false_target
3202 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003203 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003204 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003205 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003206 if (true_target == nullptr) {
3207 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3208 } else {
3209 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3210 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003211 } else {
3212    // The condition instruction has not been materialized; use its inputs as
3213 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003214 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003215
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003216 DataType::Type type = condition->InputAt(0)->GetType();
3217 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003218 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003219 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003220 IfCondition opposite_condition = condition->GetOppositeCondition();
3221 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003222 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003223 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003224 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003225 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003226 // Integer cases.
3227 Register lhs = InputRegisterAt(condition, 0);
3228 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003229
3230 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003231 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003232 if (true_target == nullptr) {
3233 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3234 non_fallthrough_target = false_target;
3235 } else {
3236 arm64_cond = ARM64Condition(condition->GetCondition());
3237 non_fallthrough_target = true_target;
3238 }
3239
Aart Bik086d27e2016-01-20 17:02:00 -08003240 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003241 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003242 switch (arm64_cond) {
3243 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003244 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003245 break;
3246 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003247 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003248 break;
3249 case lt:
3250 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003251 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003252 break;
3253 case ge:
3254 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003255 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003256 break;
3257 default:
3258 // Without the `static_cast` the compiler throws an error for
3259 // `-Werror=sign-promo`.
3260 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3261 }
3262 } else {
3263 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003264 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003265 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003266 }
3267 }
David Brazdil0debae72015-11-12 18:37:00 +00003268
3269 // If neither branch falls through (case 3), the conditional branch to `true_target`
3270 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3271 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003272 __ B(false_target);
3273 }
3274}
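// Examples of the zero-comparison shortcuts above (register names illustrative):
//   if (x == 0) goto L;  =>  cbz  w0, L
//   if (x != 0) goto L;  =>  cbnz w0, L
//   if (x < 0)  goto L;  =>  tbnz w0, #31, L   // test the sign bit
//   if (x >= 0) goto L;  =>  tbz  w0, #31, L
// Any other condition, or a non-zero right-hand side, falls back to the generic
// cmp + b.cond pair emitted at the end of the integer case.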
3275
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003276void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003277 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003278 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003279 locations->SetInAt(0, Location::RequiresRegister());
3280 }
3281}
3282
3283void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003284 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3285 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003286 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3287 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3288 true_target = nullptr;
3289 }
3290 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3291 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3292 false_target = nullptr;
3293 }
Andreas Gampe3db70682018-12-26 15:12:03 -08003294 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003295}
3296
3297void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003298 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003299 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003300 InvokeRuntimeCallingConvention calling_convention;
3301 RegisterSet caller_saves = RegisterSet::Empty();
3302 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3303 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003304 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003305 locations->SetInAt(0, Location::RequiresRegister());
3306 }
3307}
3308
3309void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003310 SlowPathCodeARM64* slow_path =
3311 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003312 GenerateTestAndBranch(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08003313 /* condition_input_index= */ 0,
David Brazdil0debae72015-11-12 18:37:00 +00003314 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08003315 /* false_target= */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003316}
3317
Mingyao Yang063fc772016-08-02 11:02:54 -07003318void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003319 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003320 LocationSummary(flag, LocationSummary::kNoCall);
3321 locations->SetOut(Location::RequiresRegister());
3322}
3323
3324void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3325 __ Ldr(OutputRegister(flag),
3326 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3327}
3328
David Brazdilc0b601b2016-02-08 14:20:45 +00003329static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3330 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003331 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003332}
3333
Alexandre Rames880f1192016-06-13 16:04:50 +01003334static inline Condition GetConditionForSelect(HCondition* condition) {
3335 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003336 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3337 : ARM64Condition(cond);
3338}
3339
David Brazdil74eb1b22015-12-14 11:44:01 +00003340void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003341 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003342 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003343 locations->SetInAt(0, Location::RequiresFpuRegister());
3344 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003345 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003346 } else {
3347 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3348 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3349 bool is_true_value_constant = cst_true_value != nullptr;
3350 bool is_false_value_constant = cst_false_value != nullptr;
3351 // Ask VIXL whether we should synthesize constants in registers.
3352 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3353 Operand true_op = is_true_value_constant ?
3354 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3355 Operand false_op = is_false_value_constant ?
3356 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3357 bool true_value_in_register = false;
3358 bool false_value_in_register = false;
3359 MacroAssembler::GetCselSynthesisInformation(
3360 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3361 true_value_in_register |= !is_true_value_constant;
3362 false_value_in_register |= !is_false_value_constant;
3363
3364 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3365 : Location::ConstantLocation(cst_true_value));
3366 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3367 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003368 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003369 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003370
David Brazdil74eb1b22015-12-14 11:44:01 +00003371 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3372 locations->SetInAt(2, Location::RequiresRegister());
3373 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003374}
3375
3376void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003377 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003378 Condition csel_cond;
3379
3380 if (IsBooleanValueOrMaterializedCondition(cond)) {
3381 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003382 // Use the condition flags set by the previous instruction.
3383 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003384 } else {
3385 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003386 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003387 }
3388 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003389 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003390 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003391 } else {
3392 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003393 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003394 }
3395
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003396 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003397 __ Fcsel(OutputFPRegister(select),
3398 InputFPRegisterAt(select, 1),
3399 InputFPRegisterAt(select, 0),
3400 csel_cond);
3401 } else {
3402 __ Csel(OutputRegister(select),
3403 InputOperandAt(select, 1),
3404 InputOperandAt(select, 0),
3405 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003406 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003407}
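// Example: for an integer `cond ? a : b` whose condition was materialized into a
// register, the code above emits roughly (register names illustrative):
//   cmp  w2, #0          // w2 holds the materialized condition
//   csel w0, w1, w3, ne  // pick the true value (w1) if cond != 0, else w3
// Floating-point selects use Fcsel instead, and when the HCondition immediately
// precedes the select its flags are reused without re-emitting the compare.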
3408
David Srbecky0cf44932015-12-09 14:09:59 +00003409void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003410 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003411}
3412
David Srbeckyd28f4a02016-03-14 17:14:24 +00003413void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3414 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003415}
3416
3417void CodeGeneratorARM64::GenerateNop() {
3418 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003419}
3420
Alexandre Rames5319def2014-10-23 10:03:10 +01003421void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003422 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003423}
3424
3425void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003426 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003427}
3428
3429void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003430 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003431}
3432
3433void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003434 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003435}
3436
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003437// Temp is used for read barrier.
3438static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3439 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003440 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003441 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3442 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3443 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3444 return 1;
3445 }
3446 return 0;
3447}
3448
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003449// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003450// interface pointer, one for loading the current interface.
3451// The other checks have one temp for loading the object's class.
3452static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3453 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3454 return 3;
3455 }
3456 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003457}
3458
Alexandre Rames67555f72014-11-18 10:55:16 +00003459void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003460 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003461 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003462 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003463 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003464 case TypeCheckKind::kExactCheck:
3465 case TypeCheckKind::kAbstractClassCheck:
3466 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003467 case TypeCheckKind::kArrayObjectCheck: {
3468 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3469 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3470 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003471 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003472 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003473 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003474 case TypeCheckKind::kUnresolvedCheck:
3475 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003476 call_kind = LocationSummary::kCallOnSlowPath;
3477 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003478 case TypeCheckKind::kBitstringCheck:
3479 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003480 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003481
Vladimir Markoca6fff82017-10-03 14:49:14 +01003482 LocationSummary* locations =
3483 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003484 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003485 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003486 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003487 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003488 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3489 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3490 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3491 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3492 } else {
3493 locations->SetInAt(1, Location::RequiresRegister());
3494 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003495 // The "out" register is used as a temporary, so it overlaps with the inputs.
3496 // Note that TypeCheckSlowPathARM64 uses this register too.
3497 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003498 // Add temps if necessary for read barriers.
3499 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003500}
3501
3502void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003503 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003504 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003505 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003506 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003507 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3508 ? Register()
3509 : InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003510 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003511 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003512 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3513 DCHECK_LE(num_temps, 1u);
3514 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003515 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3516 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3517 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3518 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003519
Scott Wakeling97c72b72016-06-24 16:19:36 +01003520 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003521 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003522
3523 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003524 // Avoid null check if we know `obj` is not null.
3525 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003526 __ Cbz(obj, &zero);
3527 }
3528
Roland Levillain44015862016-01-22 11:47:17 +00003529 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003530 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003531 ReadBarrierOption read_barrier_option =
3532 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003533 // /* HeapReference<Class> */ out = obj->klass_
3534 GenerateReferenceLoadTwoRegisters(instruction,
3535 out_loc,
3536 obj_loc,
3537 class_offset,
3538 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003539 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003540 __ Cmp(out, cls);
3541 __ Cset(out, eq);
3542 if (zero.IsLinked()) {
3543 __ B(&done);
3544 }
3545 break;
3546 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003547
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003548 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003549 ReadBarrierOption read_barrier_option =
3550 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003551 // /* HeapReference<Class> */ out = obj->klass_
3552 GenerateReferenceLoadTwoRegisters(instruction,
3553 out_loc,
3554 obj_loc,
3555 class_offset,
3556 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003557 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003558 // If the class is abstract, we eagerly fetch the super class of the
3559 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003560 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003561 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003562 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003563 GenerateReferenceLoadOneRegister(instruction,
3564 out_loc,
3565 super_offset,
3566 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003567 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003568 // If `out` is null, we use it for the result, and jump to `done`.
3569 __ Cbz(out, &done);
3570 __ Cmp(out, cls);
3571 __ B(ne, &loop);
3572 __ Mov(out, 1);
3573 if (zero.IsLinked()) {
3574 __ B(&done);
3575 }
3576 break;
3577 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003578
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003579 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003580 ReadBarrierOption read_barrier_option =
3581 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003582 // /* HeapReference<Class> */ out = obj->klass_
3583 GenerateReferenceLoadTwoRegisters(instruction,
3584 out_loc,
3585 obj_loc,
3586 class_offset,
3587 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003588 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003589 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003590 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003591 __ Bind(&loop);
3592 __ Cmp(out, cls);
3593 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003594 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003595 GenerateReferenceLoadOneRegister(instruction,
3596 out_loc,
3597 super_offset,
3598 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003599 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003600 __ Cbnz(out, &loop);
3601 // If `out` is null, we use it for the result, and jump to `done`.
3602 __ B(&done);
3603 __ Bind(&success);
3604 __ Mov(out, 1);
3605 if (zero.IsLinked()) {
3606 __ B(&done);
3607 }
3608 break;
3609 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003610
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003611 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003612 ReadBarrierOption read_barrier_option =
3613 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003614 // /* HeapReference<Class> */ out = obj->klass_
3615 GenerateReferenceLoadTwoRegisters(instruction,
3616 out_loc,
3617 obj_loc,
3618 class_offset,
3619 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003620 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003621 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003622 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003623 __ Cmp(out, cls);
3624 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003625 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003626 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003627 GenerateReferenceLoadOneRegister(instruction,
3628 out_loc,
3629 component_offset,
3630 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003631 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003632 // If `out` is null, we use it for the result, and jump to `done`.
3633 __ Cbz(out, &done);
3634 __ Ldrh(out, HeapOperand(out, primitive_offset));
3635 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3636 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003637 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003638 __ Mov(out, 1);
3639 __ B(&done);
3640 break;
3641 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003642
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003643 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003644 // No read barrier since the slow path will retry upon failure.
3645 // /* HeapReference<Class> */ out = obj->klass_
3646 GenerateReferenceLoadTwoRegisters(instruction,
3647 out_loc,
3648 obj_loc,
3649 class_offset,
3650 maybe_temp_loc,
3651 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003652 __ Cmp(out, cls);
3653 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003654 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
Andreas Gampe3db70682018-12-26 15:12:03 -08003655 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003656 codegen_->AddSlowPath(slow_path);
3657 __ B(ne, slow_path->GetEntryLabel());
3658 __ Mov(out, 1);
3659 if (zero.IsLinked()) {
3660 __ B(&done);
3661 }
3662 break;
3663 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003664
Calin Juravle98893e12015-10-02 21:05:03 +01003665 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003666 case TypeCheckKind::kInterfaceCheck: {
3667 // Note that we indeed only call on slow path, but we always go
3668 // into the slow path for the unresolved and interface check
3669 // cases.
3670 //
3671 // We cannot directly call the InstanceofNonTrivial runtime
3672 // entry point without resorting to a type checking slow path
3673 // here (i.e. by calling InvokeRuntime directly), as it would
3674 // require to assign fixed registers for the inputs of this
3675 // HInstanceOf instruction (following the runtime calling
3676 // convention), which might be cluttered by the potential first
3677 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003678 //
3679 // TODO: Introduce a new runtime entry point taking the object
3680 // to test (instead of its class) as argument, and let it deal
3681 // with the read barrier issues. This will let us refactor this
3682 // case of the `switch` code as it was previously (with a direct
3683 // call to the runtime not using a type checking slow path).
3684 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003685 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003686 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
Andreas Gampe3db70682018-12-26 15:12:03 -08003687 instruction, /* is_fatal= */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003688 codegen_->AddSlowPath(slow_path);
3689 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003690 if (zero.IsLinked()) {
3691 __ B(&done);
3692 }
3693 break;
3694 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003695
3696 case TypeCheckKind::kBitstringCheck: {
3697      // /* HeapReference<Class> */ out = obj->klass_
3698 GenerateReferenceLoadTwoRegisters(instruction,
3699 out_loc,
3700 obj_loc,
3701 class_offset,
3702 maybe_temp_loc,
3703 kWithoutReadBarrier);
3704
3705 GenerateBitstringTypeCheckCompare(instruction, out);
3706 __ Cset(out, eq);
3707 if (zero.IsLinked()) {
3708 __ B(&done);
3709 }
3710 break;
3711 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003712 }
3713
3714 if (zero.IsLinked()) {
3715 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003716 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003717 }
3718
3719 if (done.IsLinked()) {
3720 __ Bind(&done);
3721 }
3722
3723 if (slow_path != nullptr) {
3724 __ Bind(slow_path->GetExitLabel());
3725 }
3726}
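// Example of the simplest case above (kExactCheck), ignoring read barriers and
// heap reference poisoning, with obj in w0 and the compared class in w1
// (register names illustrative):
//   cbz  w0, zero                  // null is not an instance of anything
//   ldr  w2, [x0, #class_offset]   // out = obj->klass_
//   cmp  w2, w1
//   cset w2, eq                    // out = 1 on an exact match, 0 otherwise
// The other cases add loops over the super class chain or the component type, or
// branch to a type checking slow path for array, interface and unresolved checks.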
3727
3728void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003729 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00003730 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01003731 LocationSummary* locations =
3732 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003733 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003734 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3735 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3736 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3737 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3738 } else {
3739 locations->SetInAt(1, Location::RequiresRegister());
3740 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003741 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
3742 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003743}
3744
3745void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003746 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003747 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003748 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003749 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003750 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3751 ? Register()
3752 : InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003753 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3754 DCHECK_GE(num_temps, 1u);
3755 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003756 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003757 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
3758 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003759 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003760 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3761 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3762 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3763 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3764 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3765 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3766 const uint32_t object_array_data_offset =
3767 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003768
Vladimir Marko87584542017-12-12 17:47:52 +00003769 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003770 SlowPathCodeARM64* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003771 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3772 instruction, is_type_check_slow_path_fatal);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003773 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003774
Scott Wakeling97c72b72016-06-24 16:19:36 +01003775 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003776 // Avoid null check if we know obj is not null.
3777 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003778 __ Cbz(obj, &done);
3779 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003780
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003781 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003782 case TypeCheckKind::kExactCheck:
3783 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003784 // /* HeapReference<Class> */ temp = obj->klass_
3785 GenerateReferenceLoadTwoRegisters(instruction,
3786 temp_loc,
3787 obj_loc,
3788 class_offset,
3789 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003790 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003791
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003792 __ Cmp(temp, cls);
3793 // Jump to slow path for throwing the exception or doing a
3794 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003795 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003796 break;
3797 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003798
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003799 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003800 // /* HeapReference<Class> */ temp = obj->klass_
3801 GenerateReferenceLoadTwoRegisters(instruction,
3802 temp_loc,
3803 obj_loc,
3804 class_offset,
3805 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003806 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003807
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003808 // If the class is abstract, we eagerly fetch the super class of the
3809 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003810 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003811 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003812 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003813 GenerateReferenceLoadOneRegister(instruction,
3814 temp_loc,
3815 super_offset,
3816 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003817 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003818
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003819 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3820 // exception.
3821 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3822 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003823 __ Cmp(temp, cls);
3824 __ B(ne, &loop);
3825 break;
3826 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003827
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003828 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003829 // /* HeapReference<Class> */ temp = obj->klass_
3830 GenerateReferenceLoadTwoRegisters(instruction,
3831 temp_loc,
3832 obj_loc,
3833 class_offset,
3834 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003835 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003836
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003837 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003838 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003839 __ Bind(&loop);
3840 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003841 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003842
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003843 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003844 GenerateReferenceLoadOneRegister(instruction,
3845 temp_loc,
3846 super_offset,
3847 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003848 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003849
3850 // If the class reference currently in `temp` is not null, jump
 3851 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003852 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003853 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003854 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003855 break;
3856 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003857
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003858 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003859 // /* HeapReference<Class> */ temp = obj->klass_
3860 GenerateReferenceLoadTwoRegisters(instruction,
3861 temp_loc,
3862 obj_loc,
3863 class_offset,
3864 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003865 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003866
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003867 // Do an exact check.
3868 __ Cmp(temp, cls);
3869 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003870
3871 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003872 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003873 GenerateReferenceLoadOneRegister(instruction,
3874 temp_loc,
3875 component_offset,
3876 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003877 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003878
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003879 // If the component type is null, jump to the slow path to throw the exception.
3880 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3881 // Otherwise, the object is indeed an array. Further check that this component type is not a
3882 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003883 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3884 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003885 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003886 break;
3887 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003888
Calin Juravle98893e12015-10-02 21:05:03 +01003889 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003890 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003891 //
3892 // We cannot directly call the CheckCast runtime entry point
3893 // without resorting to a type checking slow path here (i.e. by
 3894 // calling InvokeRuntime directly), as it would require assigning
 3895 // fixed registers for the inputs of this HCheckCast
3896 // instruction (following the runtime calling convention), which
3897 // might be cluttered by the potential first read barrier
3898 // emission at the beginning of this method.
3899 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003900 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003901 case TypeCheckKind::kInterfaceCheck: {
3902 // /* HeapReference<Class> */ temp = obj->klass_
3903 GenerateReferenceLoadTwoRegisters(instruction,
3904 temp_loc,
3905 obj_loc,
3906 class_offset,
3907 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003908 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003909
3910 // /* HeapReference<Class> */ temp = temp->iftable_
3911 GenerateReferenceLoadTwoRegisters(instruction,
3912 temp_loc,
3913 temp_loc,
3914 iftable_offset,
3915 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003916 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08003917 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003918 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08003919 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003920 vixl::aarch64::Label start_loop;
3921 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003922 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003923 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
3924 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003925 // Go to next interface.
3926 __ Add(temp, temp, 2 * kHeapReferenceSize);
3927 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003928 // Compare the classes and continue the loop if they do not match.
3929 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
3930 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003931 break;
3932 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003933
3934 case TypeCheckKind::kBitstringCheck: {
3935 // /* HeapReference<Class> */ temp = obj->klass_
3936 GenerateReferenceLoadTwoRegisters(instruction,
3937 temp_loc,
3938 obj_loc,
3939 class_offset,
3940 maybe_temp2_loc,
3941 kWithoutReadBarrier);
3942
3943 GenerateBitstringTypeCheckCompare(instruction, temp);
3944 __ B(ne, type_check_slow_path->GetEntryLabel());
3945 break;
3946 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003947 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003948 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003949
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003950 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003951}
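// Illustrative sketch only (not emitted code): the kInterfaceCheck loop above behaves roughly
// like the pseudo-C++ below. It assumes the IfTable layout used here, where each entry holds
// two references (the interface class followed by its method array), which is why the cursor
// advances by 2 * kHeapReferenceSize and the remaining count drops by 2 per iteration. The
// names `remaining`, `cursor` and `iface` are placeholders for maybe_temp2, temp and maybe_temp3.
//
//   uint32_t remaining = iftable->length_;             // Ldr maybe_temp2, [temp, #array_length_offset]
//   /* cursor starts at the iftable's first entry */
//   for (;;) {
//     if (remaining == 0) goto type_check_slow_path;   // Cbz maybe_temp2, slow path
//     mirror::Class* iface = cursor[0];                // Ldr maybe_temp3, [temp, #object_array_data_offset]
//     cursor += 2;                                     // Add temp, temp, 2 * kHeapReferenceSize
//     remaining -= 2;                                  // Sub maybe_temp2, maybe_temp2, 2
//     if (iface == cls) break;                         // Cmp cls, maybe_temp3; B.ne start_loop
//   }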
3952
Alexandre Rames5319def2014-10-23 10:03:10 +01003953void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003954 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01003955 locations->SetOut(Location::ConstantLocation(constant));
3956}
3957
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003958void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003959 // Will be generated at use site.
3960}
3961
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003962void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003963 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003964 locations->SetOut(Location::ConstantLocation(constant));
3965}
3966
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003967void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003968 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003969}
3970
Calin Juravle175dc732015-08-25 15:42:32 +01003971void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3972 // The trampoline uses the same calling convention as dex calling conventions,
3973 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3974 // the method_idx.
3975 HandleInvoke(invoke);
3976}
3977
3978void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3979 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08003980 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Calin Juravle175dc732015-08-25 15:42:32 +01003981}
3982
Alexandre Rames5319def2014-10-23 10:03:10 +01003983void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003984 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003985 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003986}
3987
Alexandre Rames67555f72014-11-18 10:55:16 +00003988void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3989 HandleInvoke(invoke);
3990}
3991
3992void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3993 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003994 LocationSummary* locations = invoke->GetLocations();
3995 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003996 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003997 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003998 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003999
4000 // The register ip1 is required to be used for the hidden argument in
4001 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004002 MacroAssembler* masm = GetVIXLAssembler();
4003 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004004 scratch_scope.Exclude(ip1);
4005 __ Mov(ip1, invoke->GetDexMethodIndex());
4006
Artem Serov914d7a82017-02-07 14:33:49 +00004007 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004008 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004009 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004010 {
4011 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4012 // /* HeapReference<Class> */ temp = temp->klass_
4013 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4014 codegen_->MaybeRecordImplicitNullCheck(invoke);
4015 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004016 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004017 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004018 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004019 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004020 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004021 }
Artem Serov914d7a82017-02-07 14:33:49 +00004022
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004023 // Instead of simply (possibly) unpoisoning `temp` here, we should
4024 // emit a read barrier for the previous class reference load.
4025 // However this is not required in practice, as this is an
4026 // intermediate/temporary reference and because the current
4027 // concurrent copying collector keeps the from-space memory
4028 // intact/accessible until the end of the marking phase (the
4029 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004030 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004031 __ Ldr(temp,
4032 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4033 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004034 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004035 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004036 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004037 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004038 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004039
4040 {
4041 // Ensure the pc position is recorded immediately after the `blr` instruction.
4042 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4043
4044 // lr();
4045 __ blr(lr);
4046 DCHECK(!codegen_->IsLeafMethod());
4047 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4048 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004049
Andreas Gampe3db70682018-12-26 15:12:03 -08004050 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004051}
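// Illustrative sketch only (not emitted verbatim): the interface dispatch above boils down to
// the sequence below. `wTemp`/`xTemp` stand for the W/X views of the allocated temp; the
// concrete offsets come from the Offset values computed in the method, and an extra stack
// load precedes this when the receiver lives in a stack slot.
//
//   mov  ip1, #dex_method_index             // Hidden argument for art_quick_imt_conflict_trampoline.
//   ldr  wTemp, [receiver, #class_offset]   // temp = receiver->klass_
//   ldr  xTemp, [xTemp, #imt_ptr_offset]    // temp = klass->imt_
//   ldr  xTemp, [xTemp, #method_offset]     // temp = imt[ImtIndex(invoke)]
//   ldr  lr, [xTemp, #entry_point_offset]   // lr = method->entry_point_from_quick_compiled_code_
//   blr  lr                                 // Call; the pc is recorded right after this instruction.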
4052
4053void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004054 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004055 if (intrinsic.TryDispatch(invoke)) {
4056 return;
4057 }
4058
Alexandre Rames67555f72014-11-18 10:55:16 +00004059 HandleInvoke(invoke);
4060}
4061
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004062void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004063 // Explicit clinit checks triggered by static invokes must have been pruned by
4064 // art::PrepareForRegisterAllocation.
4065 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004066
Vladimir Markoca6fff82017-10-03 14:49:14 +01004067 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004068 if (intrinsic.TryDispatch(invoke)) {
4069 return;
4070 }
4071
Alexandre Rames67555f72014-11-18 10:55:16 +00004072 HandleInvoke(invoke);
4073}
4074
Andreas Gampe878d58c2015-01-15 23:24:00 -08004075static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4076 if (invoke->GetLocations()->Intrinsified()) {
4077 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4078 intrinsic.Dispatch(invoke);
4079 return true;
4080 }
4081 return false;
4082}
4083
Vladimir Markodc151b22015-10-15 18:02:30 +01004084HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4085 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +01004086 ArtMethod* method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004087 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004088 return desired_dispatch_info;
4089}
4090
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004091void CodeGeneratorARM64::GenerateStaticOrDirectCall(
4092 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004093 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004094 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4095 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004096 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4097 uint32_t offset =
4098 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004099 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004100 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004101 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004102 }
Vladimir Marko58155012015-08-19 12:49:41 +00004103 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004104 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004105 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004106 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4107 DCHECK(GetCompilerOptions().IsBootImage());
4108 // Add ADRP with its PC-relative method patch.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004109 vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01004110 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4111 // Add ADD with its PC-relative method patch.
4112 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004113 NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
Vladimir Marko65979462017-05-19 17:25:12 +01004114 EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
4115 break;
4116 }
Vladimir Markob066d432018-01-03 13:14:37 +00004117 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4118 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004119 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00004120 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
4121 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4122 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
4123 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
4124 // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
4125 EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
4126 break;
4127 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004128 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Markob066d432018-01-03 13:14:37 +00004129 // Add ADRP with its PC-relative .bss entry patch.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004130 MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4131 vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004132 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Markob066d432018-01-03 13:14:37 +00004133 // Add LDR with its PC-relative .bss entry patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004134 vixl::aarch64::Label* ldr_label =
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004135 NewMethodBssEntryPatch(target_method, adrp_label);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01004136 // All aligned loads are implicitly atomic consume operations on ARM64.
Vladimir Markoaad75c62016-10-03 08:46:48 +00004137 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004138 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004139 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004140 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4141 // Load method address from literal pool.
4142 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
4143 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004144 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4145 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4146 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko58155012015-08-19 12:49:41 +00004147 }
4148 }
4149
4150 switch (invoke->GetCodePtrLocation()) {
4151 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004152 {
4153 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
4154 ExactAssemblyScope eas(GetVIXLAssembler(),
4155 kInstructionSize,
4156 CodeBufferCheckScope::kExactSize);
4157 __ bl(&frame_entry_label_);
4158 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
4159 }
Vladimir Marko58155012015-08-19 12:49:41 +00004160 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004161 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4162 // LR = callee_method->entry_point_from_quick_compiled_code_;
4163 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004164 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004165 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004166 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004167 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004168 ExactAssemblyScope eas(GetVIXLAssembler(),
4169 kInstructionSize,
4170 CodeBufferCheckScope::kExactSize);
4171 // lr()
4172 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004173 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004174 }
Vladimir Marko58155012015-08-19 12:49:41 +00004175 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004176 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004177
Andreas Gampe878d58c2015-01-15 23:24:00 -08004178 DCHECK(!IsLeafMethod());
4179}
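// Illustrative summary (not emitted verbatim) of the PC-relative method load kinds handled
// above; the zero immediates are placeholders that the linker rewrites, and `xTemp`/`wTemp`
// are stand-ins for the temp register views:
//
//   kBootImageLinkTimePcRelative:  adrp xTemp, 0 ; add xTemp, xTemp, 0    // ArtMethod* itself.
//   kBootImageRelRo:               adrp xTemp, 0 ; ldr  wTemp, [xTemp, 0] // 32-bit boot image address.
//   kBssEntry:                     adrp xTemp, 0 ; ldr  xTemp, [xTemp, 0] // 64-bit .bss entry, expected
//                                                                         // to be filled by the runtime
//                                                                         // once the method is resolved.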
4180
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004181void CodeGeneratorARM64::GenerateVirtualCall(
4182 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004183 // Use the calling convention instead of the location of the receiver, as
4184 // intrinsics may have put the receiver in a different register. In the intrinsics
4185 // slow path, the arguments have been moved to the right place, so here we are
4186 // guaranteed that the receiver is the first register of the calling convention.
4187 InvokeDexCallingConvention calling_convention;
4188 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004189 Register temp = XRegisterFrom(temp_in);
4190 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4191 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4192 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004193 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004194
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004195 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004196
4197 {
4198 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4199 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4200 // /* HeapReference<Class> */ temp = receiver->klass_
4201 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4202 MaybeRecordImplicitNullCheck(invoke);
4203 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004204 // Instead of simply (possibly) unpoisoning `temp` here, we should
4205 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004206 // However this is not required in practice, as this is an intermediate/temporary reference and because the current
4207 // concurrent copying collector keeps the from-space memory
4208 // intact/accessible until the end of the marking phase (the
4209 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004210 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4211 // temp = temp->GetMethodAt(method_offset);
4212 __ Ldr(temp, MemOperand(temp, method_offset));
4213 // lr = temp->GetEntryPoint();
4214 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004215 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004216 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004217 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4218 // lr();
4219 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004220 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004221 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004222}
4223
Orion Hodsonac141392017-01-13 11:53:47 +00004224void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4225 HandleInvoke(invoke);
4226}
4227
4228void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4229 codegen_->GenerateInvokePolymorphicCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08004230 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004231}
4232
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004233void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4234 HandleInvoke(invoke);
4235}
4236
4237void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4238 codegen_->GenerateInvokeCustomCall(invoke);
Andreas Gampe3db70682018-12-26 15:12:03 -08004239 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004240}
4241
Vladimir Marko6fd16062018-06-26 11:02:04 +01004242vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
4243 uint32_t intrinsic_data,
4244 vixl::aarch64::Label* adrp_label) {
4245 return NewPcRelativePatch(
Vladimir Marko2d06e022019-07-08 15:45:19 +01004246 /* dex_file= */ nullptr, intrinsic_data, adrp_label, &boot_image_other_patches_);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004247}
4248
Vladimir Markob066d432018-01-03 13:14:37 +00004249vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
4250 uint32_t boot_image_offset,
4251 vixl::aarch64::Label* adrp_label) {
4252 return NewPcRelativePatch(
Vladimir Marko2d06e022019-07-08 15:45:19 +01004253 /* dex_file= */ nullptr, boot_image_offset, adrp_label, &boot_image_other_patches_);
Vladimir Markob066d432018-01-03 13:14:37 +00004254}
4255
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004256vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004257 MethodReference target_method,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004258 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004259 return NewPcRelativePatch(
4260 target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004261}
4262
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004263vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
4264 MethodReference target_method,
4265 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004266 return NewPcRelativePatch(
4267 target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004268}
4269
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004270vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
Scott Wakeling97c72b72016-06-24 16:19:36 +01004271 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004272 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004273 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004274 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004275}
4276
Vladimir Marko1998cd02017-01-13 13:02:58 +00004277vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4278 const DexFile& dex_file,
4279 dex::TypeIndex type_index,
4280 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004281 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004282}
4283
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004284vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004285 const DexFile& dex_file,
4286 dex::StringIndex string_index,
4287 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004288 return NewPcRelativePatch(
4289 &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01004290}
4291
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004292vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
4293 const DexFile& dex_file,
4294 dex::StringIndex string_index,
4295 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004296 return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004297}
4298
Vladimir Markof6675082019-05-17 12:05:28 +01004299void CodeGeneratorARM64::EmitEntrypointThunkCall(ThreadOffset64 entrypoint_offset) {
4300 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
4301 DCHECK(!Runtime::Current()->UseJitCompilation());
 4302 call_entrypoint_patches_.emplace_back(/* dex_file= */ nullptr, entrypoint_offset.Uint32Value());
4303 vixl::aarch64::Label* bl_label = &call_entrypoint_patches_.back().label;
4304 __ bind(bl_label);
4305 __ bl(static_cast<int64_t>(0)); // Placeholder, patched at link-time.
4306}
4307
Vladimir Marko966b46f2018-08-03 10:20:19 +00004308void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
Vladimir Marko94796f82018-08-08 15:15:33 +01004309 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
Vladimir Marko966b46f2018-08-03 10:20:19 +00004310 if (Runtime::Current()->UseJitCompilation()) {
4311 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
4312 vixl::aarch64::Label* slow_path_entry = &it->second.label;
4313 __ cbnz(mr, slow_path_entry);
4314 } else {
4315 baker_read_barrier_patches_.emplace_back(custom_data);
4316 vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
4317 __ bind(cbnz_label);
4318 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
4319 }
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004320}
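// Illustrative note: both paths above emit a single conditional branch on the marking register,
// i.e. roughly
//
//   cbnz mr, <Baker read barrier slow path / thunk>
//
// Under JIT the target is the per-custom_data slow path label kept in
// jit_baker_read_barrier_slow_paths_; under AOT the branch is emitted with a zero offset and
// the linker later retargets it at the matching Baker read barrier thunk.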
4321
Scott Wakeling97c72b72016-06-24 16:19:36 +01004322vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004323 const DexFile* dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004324 uint32_t offset_or_index,
4325 vixl::aarch64::Label* adrp_label,
4326 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004327 // Add a patch entry and return the label.
4328 patches->emplace_back(dex_file, offset_or_index);
4329 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004330 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004331 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4332 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4333 return label;
4334}
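// Typical usage (mirroring the callers above): the first call passes a null adrp_label, so the
// returned label marks the ADRP instruction itself; the second call passes that label back so
// the paired ADD/LDR patch records which ADRP it completes.
//
//   vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(target_method);
//   EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
//   vixl::aarch64::Label* add_label = NewBootImageMethodPatch(target_method, adrp_label);
//   EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));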
4335
Scott Wakeling97c72b72016-06-24 16:19:36 +01004336vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4337 uint64_t address) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004338 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004339}
4340
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004341vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004342 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004343 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004344 return jit_string_patches_.GetOrCreate(
4345 StringReference(&dex_file, string_index),
Andreas Gampe3db70682018-12-26 15:12:03 -08004346 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004347}
4348
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004349vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004350 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004351 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004352 return jit_class_patches_.GetOrCreate(
4353 TypeReference(&dex_file, type_index),
Andreas Gampe3db70682018-12-26 15:12:03 -08004354 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004355}
4356
Vladimir Markoaad75c62016-10-03 08:46:48 +00004357void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4358 vixl::aarch64::Register reg) {
4359 DCHECK(reg.IsX());
4360 SingleEmissionCheckScope guard(GetVIXLAssembler());
4361 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004362 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004363}
4364
4365void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4366 vixl::aarch64::Register out,
4367 vixl::aarch64::Register base) {
4368 DCHECK(out.IsX());
4369 DCHECK(base.IsX());
4370 SingleEmissionCheckScope guard(GetVIXLAssembler());
4371 __ Bind(fixup_label);
4372 __ add(out, base, Operand(/* offset placeholder */ 0));
4373}
4374
4375void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4376 vixl::aarch64::Register out,
4377 vixl::aarch64::Register base) {
4378 DCHECK(base.IsX());
4379 SingleEmissionCheckScope guard(GetVIXLAssembler());
4380 __ Bind(fixup_label);
4381 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4382}
4383
Vladimir Markoeebb8212018-06-05 14:57:24 +01004384void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01004385 uint32_t boot_image_reference) {
4386 if (GetCompilerOptions().IsBootImage()) {
 4387 // Add ADRP with its PC-relative intrinsic reference patch.
4388 vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
4389 EmitAdrpPlaceholder(adrp_label, reg.X());
 4390 // Add ADD with its PC-relative intrinsic reference patch.
4391 vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
4392 EmitAddPlaceholder(add_label, reg.X(), reg.X());
Vladimir Markoa2da9b92018-10-10 14:21:55 +01004393 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01004394 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004395 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004396 EmitAdrpPlaceholder(adrp_label, reg.X());
4397 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004398 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004399 EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
4400 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004401 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01004402 gc::Heap* heap = Runtime::Current()->GetHeap();
4403 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004404 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01004405 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
4406 }
4407}
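// Illustrative summary of the three cases above (placeholder immediates shown as 0):
//
//   boot image compile:  adrp reg, 0 ; add reg, reg, 0      // link-time patched boot image reference
//   PIC app compile:     adrp reg, 0 ; ldr wreg, [reg, 0]   // load from the .data.bimg.rel.ro entry
//   JIT compile:         ldr wreg, <literal>                // deduplicated pool literal holding the
//                                                           // 32-bit boot image address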
4408
Vladimir Marko6fd16062018-06-26 11:02:04 +01004409void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
4410 uint32_t boot_image_offset) {
4411 DCHECK(invoke->IsStatic());
4412 InvokeRuntimeCallingConvention calling_convention;
4413 Register argument = calling_convention.GetRegisterAt(0);
4414 if (GetCompilerOptions().IsBootImage()) {
4415 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
4416 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
4417 MethodReference target_method = invoke->GetTargetMethod();
4418 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
4419 // Add ADRP with its PC-relative type patch.
4420 vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
4421 EmitAdrpPlaceholder(adrp_label, argument.X());
4422 // Add ADD with its PC-relative type patch.
4423 vixl::aarch64::Label* add_label =
4424 NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
4425 EmitAddPlaceholder(add_label, argument.X(), argument.X());
4426 } else {
4427 LoadBootImageAddress(argument, boot_image_offset);
4428 }
4429 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
4430 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
4431}
4432
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004433template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004434inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4435 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004436 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004437 for (const PcRelativePatchInfo& info : infos) {
4438 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004439 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004440 info.pc_insn_label->GetLocation(),
4441 info.offset_or_index));
4442 }
4443}
4444
Vladimir Marko6fd16062018-06-26 11:02:04 +01004445template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4446linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4447 const DexFile* target_dex_file,
4448 uint32_t pc_insn_offset,
4449 uint32_t boot_image_offset) {
4450 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4451 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004452}
4453
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004454void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004455 DCHECK(linker_patches->empty());
4456 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004457 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004458 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004459 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004460 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004461 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004462 string_bss_entry_patches_.size() +
Vladimir Marko2d06e022019-07-08 15:45:19 +01004463 boot_image_other_patches_.size() +
Vladimir Markof6675082019-05-17 12:05:28 +01004464 call_entrypoint_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004465 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004466 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01004467 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004468 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004469 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004470 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004471 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004472 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004473 boot_image_string_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004474 } else {
Vladimir Marko2d06e022019-07-08 15:45:19 +01004475 DCHECK(boot_image_method_patches_.empty());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004476 DCHECK(boot_image_type_patches_.empty());
4477 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko2d06e022019-07-08 15:45:19 +01004478 }
4479 if (GetCompilerOptions().IsBootImage()) {
4480 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4481 boot_image_other_patches_, linker_patches);
4482 } else {
4483 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
4484 boot_image_other_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004485 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004486 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4487 method_bss_entry_patches_, linker_patches);
4488 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4489 type_bss_entry_patches_, linker_patches);
4490 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4491 string_bss_entry_patches_, linker_patches);
Vladimir Markof6675082019-05-17 12:05:28 +01004492 for (const PatchInfo<vixl::aarch64::Label>& info : call_entrypoint_patches_) {
4493 DCHECK(info.target_dex_file == nullptr);
4494 linker_patches->push_back(linker::LinkerPatch::CallEntrypointPatch(
4495 info.label.GetLocation(), info.offset_or_index));
4496 }
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004497 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004498 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4499 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004500 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004501 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004502}
4503
Vladimir Markoca1e0382018-04-11 09:58:41 +00004504bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
Vladimir Markof6675082019-05-17 12:05:28 +01004505 return patch.GetType() == linker::LinkerPatch::Type::kCallEntrypoint ||
4506 patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
Vladimir Markoca1e0382018-04-11 09:58:41 +00004507 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4508}
4509
4510void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4511 /*out*/ ArenaVector<uint8_t>* code,
4512 /*out*/ std::string* debug_name) {
4513 Arm64Assembler assembler(GetGraph()->GetAllocator());
4514 switch (patch.GetType()) {
4515 case linker::LinkerPatch::Type::kCallRelative: {
4516 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4517 // to the generic JNI and interpreter trampolines.
4518 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4519 kArm64PointerSize).Int32Value());
4520 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
4521 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4522 *debug_name = "MethodCallThunk";
4523 }
4524 break;
4525 }
Vladimir Markof6675082019-05-17 12:05:28 +01004526 case linker::LinkerPatch::Type::kCallEntrypoint: {
4527 Offset offset(patch.EntrypointOffset());
4528 assembler.JumpTo(ManagedRegister(arm64::TR), offset, ManagedRegister(arm64::IP0));
4529 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4530 *debug_name = "EntrypointCallThunk_" + std::to_string(offset.Uint32Value());
4531 }
4532 break;
4533 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00004534 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4535 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4536 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4537 break;
4538 }
4539 default:
4540 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4541 UNREACHABLE();
4542 }
4543
4544 // Ensure we emit the literal pool if any.
4545 assembler.FinalizeCode();
4546 code->resize(assembler.CodeSize());
4547 MemoryRegion code_region(code->data(), code->size());
4548 assembler.FinalizeInstructions(code_region);
4549}
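// Illustrative sketch (assuming Arm64Assembler::JumpTo emits a load of the target from
// [base + offset] into the scratch register followed by an indirect branch): the method call
// thunk produced above is roughly
//
//   ldr ip0, [x0, #entry_point_from_quick_compiled_code_offset]
//   br  ip0
//
// and the entrypoint call thunk is the same pattern with TR (the thread register) as the base
// and the entrypoint offset taken from the patch.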
4550
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004551vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4552 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004553 value,
4554 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4555}
4556
Scott Wakeling97c72b72016-06-24 16:19:36 +01004557vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004558 return uint64_literals_.GetOrCreate(
4559 value,
4560 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004561}
4562
Andreas Gampe878d58c2015-01-15 23:24:00 -08004563void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004564 // Explicit clinit checks triggered by static invokes must have been pruned by
4565 // art::PrepareForRegisterAllocation.
4566 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004567
Andreas Gampe878d58c2015-01-15 23:24:00 -08004568 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Andreas Gampe3db70682018-12-26 15:12:03 -08004569 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004570 return;
4571 }
4572
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004573 {
4574 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4575 // are no pools emitted.
4576 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4577 LocationSummary* locations = invoke->GetLocations();
4578 codegen_->GenerateStaticOrDirectCall(
4579 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
4580 }
4581
Andreas Gampe3db70682018-12-26 15:12:03 -08004582 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004583}
4584
4585void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004586 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Andreas Gampe3db70682018-12-26 15:12:03 -08004587 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004588 return;
4589 }
4590
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004591 {
4592 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4593 // are no pools emitted.
4594 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4595 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
4596 DCHECK(!codegen_->IsLeafMethod());
4597 }
4598
Andreas Gampe3db70682018-12-26 15:12:03 -08004599 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004600}
4601
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004602HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4603 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004604 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004605 case HLoadClass::LoadKind::kInvalid:
4606 LOG(FATAL) << "UNREACHABLE";
4607 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004608 case HLoadClass::LoadKind::kReferrersClass:
4609 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004610 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004611 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004612 case HLoadClass::LoadKind::kBssEntry:
4613 DCHECK(!Runtime::Current()->UseJitCompilation());
4614 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004615 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004616 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004617 DCHECK(Runtime::Current()->UseJitCompilation());
4618 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004619 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004620 break;
4621 }
4622 return desired_class_load_kind;
4623}
4624
Alexandre Rames67555f72014-11-18 10:55:16 +00004625void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004626 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004627 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004628 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004629 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004630 cls,
4631 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004632 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004633 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004634 return;
4635 }
Vladimir Marko41559982017-01-06 14:04:23 +00004636 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004637
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004638 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4639 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004640 ? LocationSummary::kCallOnSlowPath
4641 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01004642 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004643 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004644 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004645 }
4646
Vladimir Marko41559982017-01-06 14:04:23 +00004647 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004648 locations->SetInAt(0, Location::RequiresRegister());
4649 }
4650 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004651 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4652 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4653 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004654 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004655 } else {
4656 // For non-Baker read barrier we have a temp-clobbering call.
4657 }
4658 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004659}
4660
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004661// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4662// move.
4663void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004664 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004665 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00004666 codegen_->GenerateLoadClassRuntimeCall(cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08004667 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01004668 return;
4669 }
Vladimir Marko41559982017-01-06 14:04:23 +00004670 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004671
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004672 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004673 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004674
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004675 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4676 ? kWithoutReadBarrier
4677 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004678 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004679 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004680 case HLoadClass::LoadKind::kReferrersClass: {
4681 DCHECK(!cls->CanCallRuntime());
4682 DCHECK(!cls->MustGenerateClinitCheck());
4683 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4684 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00004685 codegen_->GenerateGcRootFieldLoad(cls,
4686 out_loc,
4687 current_method,
4688 ArtMethod::DeclaringClassOffset().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08004689 /* fixup_label= */ nullptr,
Vladimir Markoca1e0382018-04-11 09:58:41 +00004690 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004691 break;
4692 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004693 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004694 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004695 // Add ADRP with its PC-relative type patch.
4696 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004697 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004698 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004699 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004700 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004701 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004702 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004703 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
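// Illustrative shape of the patched sequence (immediates are filled in by the
// linker): an adrp/add pair that materializes the boot image address of the
// mirror::Class directly in `out`:
//   adrp x<out>, <type page>
//   add  x<out>, x<out>, #<page offset>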
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004704 break;
4705 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004706 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004707 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004708 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
4709 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
4710 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004711 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004712 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004713 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004714 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004715 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
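// Illustrative shape of the patched sequence: an adrp/ldr pair that loads the
// 32-bit boot image reference from its .data.bimg.rel.ro slot:
//   adrp x<out>, <slot page>
//   ldr  w<out>, [x<out>, #<page offset>]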
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004716 break;
4717 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004718 case HLoadClass::LoadKind::kBssEntry: {
4719 // Add ADRP with its PC-relative Class .bss entry patch.
4720 const DexFile& dex_file = cls->GetDexFile();
4721 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00004722 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
4723 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4724 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004725 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004726 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00004727 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004728 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markod5fd5c32019-07-02 14:46:32 +01004729 // All aligned loads are implicitly atomic consume operations on ARM64.
Vladimir Markoca1e0382018-04-11 09:58:41 +00004730 codegen_->GenerateGcRootFieldLoad(cls,
4731 out_loc,
4732 temp,
4733 /* offset placeholder */ 0u,
4734 ldr_label,
4735 read_barrier_option);
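// The .bss slot is zero until the class is resolved, so the Cbz emitted below
// (guarded by `generate_null_check`) dispatches to the resolution slow path on
// the first execution.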
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004736 generate_null_check = true;
4737 break;
4738 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004739 case HLoadClass::LoadKind::kJitBootImageAddress: {
4740 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
4741 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
4742 DCHECK_NE(address, 0u);
4743 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
4744 break;
4745 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004746 case HLoadClass::LoadKind::kJitTableAddress: {
4747 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4748 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004749 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00004750 codegen_->GenerateGcRootFieldLoad(cls,
4751 out_loc,
4752 out.X(),
Andreas Gampe3db70682018-12-26 15:12:03 -08004753 /* offset= */ 0,
4754 /* fixup_label= */ nullptr,
Vladimir Markoca1e0382018-04-11 09:58:41 +00004755 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004756 break;
4757 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004758 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004759 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004760 LOG(FATAL) << "UNREACHABLE";
4761 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004762 }
4763
Vladimir Markoea4c1262017-02-06 19:59:33 +00004764 bool do_clinit = cls->MustGenerateClinitCheck();
4765 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004766 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01004767 SlowPathCodeARM64* slow_path =
4768 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004769 codegen_->AddSlowPath(slow_path);
4770 if (generate_null_check) {
4771 __ Cbz(out, slow_path->GetEntryLabel());
4772 }
4773 if (cls->MustGenerateClinitCheck()) {
4774 GenerateClassInitializationCheck(slow_path, out);
4775 } else {
4776 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004777 }
Andreas Gampe3db70682018-12-26 15:12:03 -08004778 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004779 }
4780}
4781
Orion Hodsondbaa5c72018-05-10 08:22:46 +01004782void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
4783 InvokeRuntimeCallingConvention calling_convention;
4784 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
4785 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
4786}
4787
4788void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
4789 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
4790}
4791
Orion Hodson18259d72018-04-12 11:18:23 +01004792void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
4793 InvokeRuntimeCallingConvention calling_convention;
4794 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
4795 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
4796}
4797
4798void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
4799 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
4800}
4801
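// The pending exception lives at a fixed offset from the thread register (tr);
// VisitLoadException reads that slot and VisitClearException stores wzr to it.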
David Brazdilcb1c0552015-08-04 16:22:25 +01004802static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004803 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004804}
4805
Alexandre Rames67555f72014-11-18 10:55:16 +00004806void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4807 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004808 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00004809 locations->SetOut(Location::RequiresRegister());
4810}
4811
4812void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004813 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4814}
4815
4816void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004817 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01004818}
4819
4820void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4821 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004822}
4823
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004824HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4825 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004826 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004827 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004828 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00004829 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004830 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004831 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004832 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004833 case HLoadString::LoadKind::kJitTableAddress:
4834 DCHECK(Runtime::Current()->UseJitCompilation());
4835 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004836 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004837 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004838 }
4839 return desired_string_load_kind;
4840}
4841
Alexandre Rames67555f72014-11-18 10:55:16 +00004842void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004843 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004844 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004845 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004846 InvokeRuntimeCallingConvention calling_convention;
4847 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4848 } else {
4849 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004850 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4851 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00004852 // Rely on the pResolveString entrypoint and on GC marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004853 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004854 } else {
4855 // For non-Baker read barrier we have a temp-clobbering call.
4856 }
4857 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004858 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004859}
4860
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004861// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4862// move.
4863void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004864 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004865 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004866
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004867 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004868 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004869 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004870 // Add ADRP with its PC-relative String patch.
4871 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004872 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004873 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004874 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004875 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004876 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004877 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004878 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004879 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004880 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004881 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004882 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004883 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
4884 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
4885 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004886 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004887 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004888 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004889 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004890 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
4891 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004892 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004893 case HLoadString::LoadKind::kBssEntry: {
4894 // Add ADRP with its PC-relative String .bss entry patch.
4895 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004896 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00004897 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004898 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004899 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004900 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00004901 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004902 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004903 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markod5fd5c32019-07-02 14:46:32 +01004904 // All aligned loads are implicitly atomic consume operations on ARM64.
Vladimir Markoca1e0382018-04-11 09:58:41 +00004905 codegen_->GenerateGcRootFieldLoad(load,
4906 out_loc,
4907 temp,
4908 /* offset placeholder */ 0u,
4909 ldr_label,
4910 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004911 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00004912 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004913 codegen_->AddSlowPath(slow_path);
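// The .bss slot is null until the string is resolved; branch to the slow path
// (which calls pResolveString) when the load above produced zero.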
4914 __ Cbz(out.X(), slow_path->GetEntryLabel());
4915 __ Bind(slow_path->GetExitLabel());
Andreas Gampe3db70682018-12-26 15:12:03 -08004916 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004917 return;
4918 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004919 case HLoadString::LoadKind::kJitBootImageAddress: {
4920 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
4921 DCHECK_NE(address, 0u);
4922 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
4923 return;
4924 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004925 case HLoadString::LoadKind::kJitTableAddress: {
4926 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004927 load->GetStringIndex(),
4928 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00004929 codegen_->GenerateGcRootFieldLoad(load,
4930 out_loc,
4931 out.X(),
Andreas Gampe3db70682018-12-26 15:12:03 -08004932 /* offset= */ 0,
4933 /* fixup_label= */ nullptr,
Vladimir Markoca1e0382018-04-11 09:58:41 +00004934 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004935 return;
4936 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004937 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004938 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004939 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004940
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004941 // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004942 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004943 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004944 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004945 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4946 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Andreas Gampe3db70682018-12-26 15:12:03 -08004947 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004948}
4949
Alexandre Rames5319def2014-10-23 10:03:10 +01004950void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004951 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01004952 locations->SetOut(Location::ConstantLocation(constant));
4953}
4954
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004955void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004956 // Will be generated at use site.
4957}
4958
Alexandre Rames67555f72014-11-18 10:55:16 +00004959void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004960 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4961 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004962 InvokeRuntimeCallingConvention calling_convention;
4963 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4964}
4965
4966void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004967 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004968 instruction,
4969 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004970 if (instruction->IsEnter()) {
4971 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4972 } else {
4973 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4974 }
Andreas Gampe3db70682018-12-26 15:12:03 -08004975 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004976}
4977
Alexandre Rames42d641b2014-10-27 14:00:51 +00004978void LocationsBuilderARM64::VisitMul(HMul* mul) {
4979 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004980 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004981 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004982 case DataType::Type::kInt32:
4983 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00004984 locations->SetInAt(0, Location::RequiresRegister());
4985 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004986 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004987 break;
4988
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004989 case DataType::Type::kFloat32:
4990 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004991 locations->SetInAt(0, Location::RequiresFpuRegister());
4992 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004993 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004994 break;
4995
4996 default:
4997 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4998 }
4999}
5000
5001void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5002 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005003 case DataType::Type::kInt32:
5004 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005005 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5006 break;
5007
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005008 case DataType::Type::kFloat32:
5009 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005010 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005011 break;
5012
5013 default:
5014 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5015 }
5016}
5017
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005018void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5019 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005020 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005021 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005022 case DataType::Type::kInt32:
5023 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005024 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005025 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005026 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005027
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005028 case DataType::Type::kFloat32:
5029 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005030 locations->SetInAt(0, Location::RequiresFpuRegister());
5031 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005032 break;
5033
5034 default:
5035 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5036 }
5037}
5038
5039void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5040 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005041 case DataType::Type::kInt32:
5042 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005043 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5044 break;
5045
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005046 case DataType::Type::kFloat32:
5047 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005048 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005049 break;
5050
5051 default:
5052 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5053 }
5054}
5055
5056void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005057 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5058 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005059 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005060 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005061 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5062 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005063}
5064
5065void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01005066 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
5067 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005068 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005069 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Andreas Gampe3db70682018-12-26 15:12:03 -08005070 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005071}
5072
Alexandre Rames5319def2014-10-23 10:03:10 +01005073void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005074 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5075 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005076 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005077 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005078 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005079}
5080
5081void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005082 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5083 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Andreas Gampe3db70682018-12-26 15:12:03 -08005084 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005085}
5086
5087void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005088 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005089 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005090 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005091}
5092
5093void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005094 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005095 case DataType::Type::kInt32:
5096 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005097 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005098 break;
5099
5100 default:
5101 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5102 }
5103}
5104
David Brazdil66d126e2015-04-03 16:02:44 +01005105void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005106 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005107 locations->SetInAt(0, Location::RequiresRegister());
5108 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5109}
5110
5111void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
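// Booleans are materialized as 0 or 1, so XOR-ing with 1 flips the value.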
Scott Wakeling97c72b72016-06-24 16:19:36 +01005112 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005113}
5114
Alexandre Rames5319def2014-10-23 10:03:10 +01005115void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005116 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5117 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005118}
5119
Calin Juravle2ae48182016-03-16 14:05:09 +00005120void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5121 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005122 return;
5123 }
Artem Serov914d7a82017-02-07 14:33:49 +00005124 {
Nicolas Geoffray61ba8d22018-08-07 09:55:57 +01005125 // Ensure that no pools are emitted between the load and RecordPcInfo.
Artem Serov914d7a82017-02-07 14:33:49 +00005126 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5127 Location obj = instruction->GetLocations()->InAt(0);
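// Loading from offset 0 of a null reference faults; the fault handler uses the
// pc info recorded below to turn the fault into a NullPointerException.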
5128 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5129 RecordPcInfo(instruction, instruction->GetDexPc());
5130 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005131}
5132
Calin Juravle2ae48182016-03-16 14:05:09 +00005133void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005134 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005135 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005136
5137 LocationSummary* locations = instruction->GetLocations();
5138 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005139
5140 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005141}
5142
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005143void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005144 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005145}
5146
Alexandre Rames67555f72014-11-18 10:55:16 +00005147void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5148 HandleBinaryOp(instruction);
5149}
5150
5151void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5152 HandleBinaryOp(instruction);
5153}
5154
Alexandre Rames3e69f162014-12-10 10:36:50 +00005155void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5156 LOG(FATAL) << "Unreachable";
5157}
5158
5159void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005160 if (instruction->GetNext()->IsSuspendCheck() &&
5161 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5162 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5163 // The back edge will generate the suspend check.
5164 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5165 }
5166
Alexandre Rames3e69f162014-12-10 10:36:50 +00005167 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5168}
5169
Alexandre Rames5319def2014-10-23 10:03:10 +01005170void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005171 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005172 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5173 if (location.IsStackSlot()) {
5174 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5175 } else if (location.IsDoubleStackSlot()) {
5176 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5177 }
5178 locations->SetOut(location);
5179}
5180
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005181void InstructionCodeGeneratorARM64::VisitParameterValue(
5182 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005183 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005184}
5185
5186void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5187 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005188 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005189 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005190}
5191
5192void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5193 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5194 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005195}
5196
5197void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005198 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005199 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005200 locations->SetInAt(i, Location::Any());
5201 }
5202 locations->SetOut(Location::Any());
5203}
5204
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005205void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005206 LOG(FATAL) << "Unreachable";
5207}
5208
Serban Constantinescu02164b32014-11-13 14:05:07 +00005209void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005210 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005211 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005212 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005213 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005214 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005215
5216 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005217 case DataType::Type::kInt32:
5218 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005219 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005220 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005221 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5222 break;
5223
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005224 case DataType::Type::kFloat32:
5225 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005226 InvokeRuntimeCallingConvention calling_convention;
5227 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5228 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5229 locations->SetOut(calling_convention.GetReturnLocation(type));
5230
5231 break;
5232 }
5233
Serban Constantinescu02164b32014-11-13 14:05:07 +00005234 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005235 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005236 }
5237}
5238
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005239void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005240 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005241 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5242 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5243
5244 Register out = OutputRegister(instruction);
5245 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005246
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005247 if (abs_imm == 2) {
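// dividend % 2 without branches: keep the low bit and negate it when the
// dividend is negative, e.g. 3 % 2 == 1 and -3 % 2 == -1.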
5248 __ Cmp(dividend, 0);
5249 __ And(out, dividend, 1);
5250 __ Csneg(out, out, out, ge);
5251 } else {
5252 UseScratchRegisterScope temps(GetVIXLAssembler());
5253 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005254
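// General power-of-two case, branch-free:
//   temp = -dividend                 (Negs also sets the condition flags)
//   out  = dividend & (abs_imm - 1)  (remainder magnitude if dividend >= 0)
//   temp = temp & (abs_imm - 1)      (remainder magnitude if dividend < 0)
//   out  = mi ? out : -temp          (mi <=> -dividend < 0 <=> dividend > 0)
// Worked example: dividend = -7, abs_imm = 4 -> temp = 7 & 3 = 3, out = -3,
// which matches Java's truncated division (-7 % 4 == -3).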
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005255 __ Negs(temp, dividend);
5256 __ And(out, dividend, abs_imm - 1);
5257 __ And(temp, temp, abs_imm - 1);
5258 __ Csneg(out, out, temp, mi);
5259 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005260}
5261
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005262void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005263 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005264
5265 if (imm == 0) {
5266 // Do not generate anything.
5267 // DivZeroCheck would prevent any code to be executed.
5268 return;
5269 }
5270
Evgeny Astigeevichf58dc652018-06-25 17:54:07 +01005271 if (IsPowerOfTwo(AbsOrMin(imm))) {
5272 // Cases imm == -1 or imm == 1 are handled in constant folding by
5273 // InstructionWithAbsorbingInputSimplifier.
5274 // If those cases have survived until code generation, they are handled in
5275 // GenerateIntRemForPower2Denom because -1 and 1 are powers of two (2^0).
5276 // Correct code is still generated for them, just with more instructions than necessary.
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005277 GenerateIntRemForPower2Denom(instruction);
5278 } else {
5279 DCHECK(imm < -2 || imm > 2) << imm;
5280 GenerateDivRemWithAnyConstant(instruction);
5281 }
5282}
5283
5284void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5285 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5286 << instruction->GetResultType();
5287
5288 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5289 GenerateIntRemForConstDenom(instruction);
5290 } else {
5291 Register out = OutputRegister(instruction);
5292 Register dividend = InputRegisterAt(instruction, 0);
5293 Register divisor = InputRegisterAt(instruction, 1);
5294 UseScratchRegisterScope temps(GetVIXLAssembler());
5295 Register temp = temps.AcquireSameSizeAs(out);
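// out = dividend - (dividend / divisor) * divisor, the standard ARM64
// sdiv/msub idiom for a truncated-division remainder.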
5296 __ Sdiv(temp, dividend, divisor);
5297 __ Msub(out, temp, divisor, dividend);
5298 }
5299}
5300
Serban Constantinescu02164b32014-11-13 14:05:07 +00005301void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005302 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005303
Serban Constantinescu02164b32014-11-13 14:05:07 +00005304 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005305 case DataType::Type::kInt32:
5306 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005307 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005308 break;
5309 }
5310
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005311 case DataType::Type::kFloat32:
5312 case DataType::Type::kFloat64: {
5313 QuickEntrypointEnum entrypoint =
5314 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005315 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005316 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005317 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5318 } else {
5319 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5320 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005321 break;
5322 }
5323
Serban Constantinescu02164b32014-11-13 14:05:07 +00005324 default:
5325 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005326 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005327 }
5328}
5329
Aart Bik1f8d51b2018-02-15 10:42:37 -08005330void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005331 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005332}
5333
Aart Bik1f8d51b2018-02-15 10:42:37 -08005334void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005335 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005336}
5337
5338void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005339 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005340}
5341
5342void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005343 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005344}
5345
Aart Bik3dad3412018-02-28 12:01:46 -08005346void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5347 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5348 switch (abs->GetResultType()) {
5349 case DataType::Type::kInt32:
5350 case DataType::Type::kInt64:
5351 locations->SetInAt(0, Location::RequiresRegister());
5352 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5353 break;
5354 case DataType::Type::kFloat32:
5355 case DataType::Type::kFloat64:
5356 locations->SetInAt(0, Location::RequiresFpuRegister());
5357 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5358 break;
5359 default:
5360 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5361 }
5362}
5363
5364void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5365 switch (abs->GetResultType()) {
5366 case DataType::Type::kInt32:
5367 case DataType::Type::kInt64: {
5368 Register in_reg = InputRegisterAt(abs, 0);
5369 Register out_reg = OutputRegister(abs);
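// Compare against zero, then Cneg negates only when lt holds, i.e.
// out = (in < 0) ? -in : in; the most negative value maps to itself,
// matching Math.abs semantics.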
5370 __ Cmp(in_reg, Operand(0));
5371 __ Cneg(out_reg, in_reg, lt);
5372 break;
5373 }
5374 case DataType::Type::kFloat32:
5375 case DataType::Type::kFloat64: {
5376 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5377 FPRegister out_reg = OutputFPRegister(abs);
5378 __ Fabs(out_reg, in_reg);
5379 break;
5380 }
5381 default:
5382 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5383 }
5384}
5385
Igor Murashkind01745e2017-04-05 16:40:31 -07005386void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5387 constructor_fence->SetLocations(nullptr);
5388}
5389
5390void InstructionCodeGeneratorARM64::VisitConstructorFence(
5391 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
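// A store-store barrier keeps the constructor's field writes ordered before
// any subsequent publication of the new object reference.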
5392 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5393}
5394
Calin Juravle27df7582015-04-17 19:12:31 +01005395void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5396 memory_barrier->SetLocations(nullptr);
5397}
5398
5399void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005400 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005401}
5402
Alexandre Rames5319def2014-10-23 10:03:10 +01005403void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005404 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005405 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005406 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005407}
5408
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005409void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005410 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005411}
5412
5413void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5414 instruction->SetLocations(nullptr);
5415}
5416
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005417void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005418 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005419}
5420
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005421void LocationsBuilderARM64::VisitRor(HRor* ror) {
5422 HandleBinaryOp(ror);
5423}
5424
5425void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5426 HandleBinaryOp(ror);
5427}
5428
Serban Constantinescu02164b32014-11-13 14:05:07 +00005429void LocationsBuilderARM64::VisitShl(HShl* shl) {
5430 HandleShift(shl);
5431}
5432
5433void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5434 HandleShift(shl);
5435}
5436
5437void LocationsBuilderARM64::VisitShr(HShr* shr) {
5438 HandleShift(shr);
5439}
5440
5441void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5442 HandleShift(shr);
5443}
5444
Alexandre Rames5319def2014-10-23 10:03:10 +01005445void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005446 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005447}
5448
5449void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005450 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005451}
5452
Alexandre Rames67555f72014-11-18 10:55:16 +00005453void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005454 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005455}
5456
5457void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005458 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005459}
5460
5461void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005462 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005463}
5464
Alexandre Rames67555f72014-11-18 10:55:16 +00005465void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005466 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005467}
5468
Vladimir Marko552a1342017-10-31 10:56:47 +00005469void LocationsBuilderARM64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5470 codegen_->CreateStringBuilderAppendLocations(instruction, LocationFrom(x0));
5471}
5472
5473void InstructionCodeGeneratorARM64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5474 __ Mov(w0, instruction->GetFormat()->GetValue());
5475 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5476}
5477
Calin Juravlee460d1d2015-09-29 04:52:17 +01005478void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5479 HUnresolvedInstanceFieldGet* instruction) {
5480 FieldAccessCallingConventionARM64 calling_convention;
5481 codegen_->CreateUnresolvedFieldLocationSummary(
5482 instruction, instruction->GetFieldType(), calling_convention);
5483}
5484
5485void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5486 HUnresolvedInstanceFieldGet* instruction) {
5487 FieldAccessCallingConventionARM64 calling_convention;
5488 codegen_->GenerateUnresolvedFieldAccess(instruction,
5489 instruction->GetFieldType(),
5490 instruction->GetFieldIndex(),
5491 instruction->GetDexPc(),
5492 calling_convention);
5493}
5494
5495void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5496 HUnresolvedInstanceFieldSet* instruction) {
5497 FieldAccessCallingConventionARM64 calling_convention;
5498 codegen_->CreateUnresolvedFieldLocationSummary(
5499 instruction, instruction->GetFieldType(), calling_convention);
5500}
5501
5502void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5503 HUnresolvedInstanceFieldSet* instruction) {
5504 FieldAccessCallingConventionARM64 calling_convention;
5505 codegen_->GenerateUnresolvedFieldAccess(instruction,
5506 instruction->GetFieldType(),
5507 instruction->GetFieldIndex(),
5508 instruction->GetDexPc(),
5509 calling_convention);
5510}
5511
5512void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5513 HUnresolvedStaticFieldGet* instruction) {
5514 FieldAccessCallingConventionARM64 calling_convention;
5515 codegen_->CreateUnresolvedFieldLocationSummary(
5516 instruction, instruction->GetFieldType(), calling_convention);
5517}
5518
5519void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5520 HUnresolvedStaticFieldGet* instruction) {
5521 FieldAccessCallingConventionARM64 calling_convention;
5522 codegen_->GenerateUnresolvedFieldAccess(instruction,
5523 instruction->GetFieldType(),
5524 instruction->GetFieldIndex(),
5525 instruction->GetDexPc(),
5526 calling_convention);
5527}
5528
5529void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5530 HUnresolvedStaticFieldSet* instruction) {
5531 FieldAccessCallingConventionARM64 calling_convention;
5532 codegen_->CreateUnresolvedFieldLocationSummary(
5533 instruction, instruction->GetFieldType(), calling_convention);
5534}
5535
5536void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5537 HUnresolvedStaticFieldSet* instruction) {
5538 FieldAccessCallingConventionARM64 calling_convention;
5539 codegen_->GenerateUnresolvedFieldAccess(instruction,
5540 instruction->GetFieldType(),
5541 instruction->GetFieldIndex(),
5542 instruction->GetDexPc(),
5543 calling_convention);
5544}
5545
Alexandre Rames5319def2014-10-23 10:03:10 +01005546void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005547 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5548 instruction, LocationSummary::kCallOnSlowPath);
Artem Serov7957d952017-04-04 15:44:09 +01005549 // In suspend check slow path, usually there are no caller-save registers at all.
5550 // If SIMD instructions are present, however, we force spilling all live SIMD
5551 // registers in full width (since the runtime only saves/restores lower part).
5552 locations->SetCustomSlowPathCallerSaves(
5553 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexandre Rames5319def2014-10-23 10:03:10 +01005554}
5555
5556void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005557 HBasicBlock* block = instruction->GetBlock();
5558 if (block->GetLoopInformation() != nullptr) {
5559 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5560 // The back edge will generate the suspend check.
5561 return;
5562 }
5563 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5564 // The goto will generate the suspend check.
5565 return;
5566 }
5567 GenerateSuspendCheck(instruction, nullptr);
Andreas Gampe3db70682018-12-26 15:12:03 -08005568 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005569}
5570
Alexandre Rames67555f72014-11-18 10:55:16 +00005571void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005572 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5573 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005574 InvokeRuntimeCallingConvention calling_convention;
5575 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5576}
5577
5578void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005579 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005580 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005581}
5582
5583void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5584 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005585 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005586 DataType::Type input_type = conversion->GetInputType();
5587 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005588 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5589 << input_type << " -> " << result_type;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005590 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
5591 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005592 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5593 }
5594
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005595 if (DataType::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005596 locations->SetInAt(0, Location::RequiresFpuRegister());
5597 } else {
5598 locations->SetInAt(0, Location::RequiresRegister());
5599 }
5600
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005601 if (DataType::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005602 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5603 } else {
5604 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5605 }
5606}
5607
5608void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005609 DataType::Type result_type = conversion->GetResultType();
5610 DataType::Type input_type = conversion->GetInputType();
Alexandre Rames67555f72014-11-18 10:55:16 +00005611
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005612 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5613 << input_type << " -> " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005614
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005615 if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
5616 int result_size = DataType::Size(result_type);
5617 int input_size = DataType::Size(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005618 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005619 Register output = OutputRegister(conversion);
5620 Register source = InputRegisterAt(conversion, 0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005621 if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005622 // 'int' values are used directly as W registers, discarding the top
5623 // bits, so we don't need to sign-extend and can just perform a move.
5624 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5625 // top 32 bits of the target register. We theoretically could leave those
5626 // bits unchanged, but we would have to make sure that no code uses a
5627 // 32bit input value as a 64bit value assuming that the top 32 bits are
5628 // zero.
5629 __ Mov(output.W(), source.W());
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005630 } else if (DataType::IsUnsignedType(result_type) ||
5631 (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
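// Zero-extend: keep the low `result_size` bytes, e.g. int -> char keeps the
// low 16 bits and clears the rest.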
5632 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005633 } else {
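// Sign-extend from the narrower of the two widths, e.g. int -> byte keeps the
// low 8 bits and replicates bit 7 upwards.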
Alexandre Rames3e69f162014-12-10 10:36:50 +00005634 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005635 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005636 } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005637 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005638 } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
5639 CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005640 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005641 } else if (DataType::IsFloatingPointType(result_type) &&
5642 DataType::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005643 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5644 } else {
5645 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5646 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005647 }
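  // Illustrative examples of the mappings above (register names are placeholders, not what the
  // register allocator will actually pick):
  //   long  -> int   : mov    w0, w1            // Truncating move between W views.
  //   int   -> short : sbfx   w0, w0, #0, #16   // Sign-extend the low 16 bits.
  //   int   -> char  : ubfx   w0, w0, #0, #16   // Zero-extend, as char is unsigned.
  //   int   -> float : scvtf  s0, w0
  //   float -> int   : fcvtzs w0, s0
  //   float -> double (and back): fcvt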
Serban Constantinescu02164b32014-11-13 14:05:07 +00005648}
Alexandre Rames67555f72014-11-18 10:55:16 +00005649
Serban Constantinescu02164b32014-11-13 14:05:07 +00005650void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5651 HandleShift(ushr);
5652}
5653
5654void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5655 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005656}
5657
5658void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5659 HandleBinaryOp(instruction);
5660}
5661
5662void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5663 HandleBinaryOp(instruction);
5664}
5665
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005666void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005667 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005668 LOG(FATAL) << "Unreachable";
5669}
5670
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005671void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005672 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005673 LOG(FATAL) << "Unreachable";
5674}
5675
Mark Mendellfe57faa2015-09-18 09:26:15 -04005676// Simple implementation of packed switch - generate cascaded compare/jumps.
5677void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5678 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005679 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005680 locations->SetInAt(0, Location::RequiresRegister());
5681}
5682
5683void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5684 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005685 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005686 Register value_reg = InputRegisterAt(switch_instr, 0);
5687 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5688
Zheng Xu3927c8b2015-11-18 17:46:25 +08005689 // Assume that at most 16 instructions are generated, on average, per HIR instruction in the graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005690 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005691 // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph to
 5692 // make sure we don't emit a jump table whose ADR target could end up out of range.
5693 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5694 // ranges and emit the tables only as required.
5695 static constexpr int32_t kJumpTableInstructionThreshold = 1* MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04005696
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005697 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005698 // Current instruction id is an upper bound of the number of HIRs in the graph.
5699 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5700 // Create a series of compare/jumps.
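    // For example (illustrative only), a four-entry switch with lower_bound == 10 expands
    // roughly to:
    //   subs temp, value, #10
    //   b.eq case_10
    //   subs temp, temp, #2
    //   b.lo case_11
    //   b.eq case_12
    //   cmp  temp, #1
    //   b.eq case_13
    //   b    default    // Omitted if the default block is the fall-through block.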
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005701 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5702 Register temp = temps.AcquireW();
5703 __ Subs(temp, value_reg, Operand(lower_bound));
5704
Zheng Xu3927c8b2015-11-18 17:46:25 +08005705 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005706 // Jump to successors[0] if value == lower_bound.
5707 __ B(eq, codegen_->GetLabelOf(successors[0]));
5708 int32_t last_index = 0;
5709 for (; num_entries - last_index > 2; last_index += 2) {
5710 __ Subs(temp, temp, Operand(2));
5711 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5712 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5713 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5714 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5715 }
5716 if (num_entries - last_index == 2) {
5717 // The last missing case_value.
5718 __ Cmp(temp, Operand(1));
5719 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005720 }
5721
5722 // And the default for any other value.
5723 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5724 __ B(codegen_->GetLabelOf(default_block));
5725 }
5726 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005727 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005728
5729 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5730
 5731 // The instructions below should use at most one blocked register. Since there are two blocked
 5732 // registers, we are free to block one here.
5733 Register temp_w = temps.AcquireW();
5734 Register index;
5735 // Remove the bias.
5736 if (lower_bound != 0) {
5737 index = temp_w;
5738 __ Sub(index, value_reg, Operand(lower_bound));
5739 } else {
5740 index = value_reg;
5741 }
5742
 5743 // Jump to the default block if the index is out of range.
5744 __ Cmp(index, Operand(num_entries));
5745 __ B(hs, codegen_->GetLabelOf(default_block));
5746
 5747 // In the current VIXL implementation, encoding the immediate value for Adr does not require
 5748 // any blocked registers, so we are free to use both VIXL blocked registers to reduce
 5749 // register pressure.
5750 Register table_base = temps.AcquireX();
5751 // Load jump offset from the table.
5752 __ Adr(table_base, jump_table->GetTableStartLabel());
5753 Register jump_offset = temp_w;
5754 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5755
 5756 // Jump to the target block by branching to table_base (PC-relative) + offset.
5757 Register target_address = table_base;
5758 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5759 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005760 }
5761}
5762
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005763void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5764 HInstruction* instruction,
5765 Location out,
5766 uint32_t offset,
5767 Location maybe_temp,
5768 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005769 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00005770 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005771 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005772 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005773 if (kUseBakerReadBarrier) {
5774 // Load with fast path based Baker's read barrier.
5775 // /* HeapReference<Object> */ out = *(out + offset)
5776 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5777 out,
5778 out_reg,
5779 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005780 maybe_temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08005781 /* needs_null_check= */ false,
5782 /* use_load_acquire= */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005783 } else {
5784 // Load with slow path based read barrier.
5785 // Save the value of `out` into `maybe_temp` before overwriting it
5786 // in the following move operation, as we will need it for the
5787 // read barrier below.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005788 Register temp_reg = RegisterFrom(maybe_temp, type);
Roland Levillain44015862016-01-22 11:47:17 +00005789 __ Mov(temp_reg, out_reg);
5790 // /* HeapReference<Object> */ out = *(out + offset)
5791 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5792 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5793 }
5794 } else {
5795 // Plain load with no read barrier.
5796 // /* HeapReference<Object> */ out = *(out + offset)
5797 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5798 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5799 }
5800}
5801
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005802void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5803 HInstruction* instruction,
5804 Location out,
5805 Location obj,
5806 uint32_t offset,
5807 Location maybe_temp,
5808 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005809 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00005810 Register out_reg = RegisterFrom(out, type);
5811 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005812 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005813 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005814 if (kUseBakerReadBarrier) {
5815 // Load with fast path based Baker's read barrier.
Roland Levillain44015862016-01-22 11:47:17 +00005816 // /* HeapReference<Object> */ out = *(obj + offset)
5817 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5818 out,
5819 obj_reg,
5820 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005821 maybe_temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08005822 /* needs_null_check= */ false,
5823 /* use_load_acquire= */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005824 } else {
5825 // Load with slow path based read barrier.
5826 // /* HeapReference<Object> */ out = *(obj + offset)
5827 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5828 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5829 }
5830 } else {
5831 // Plain load with no read barrier.
5832 // /* HeapReference<Object> */ out = *(obj + offset)
5833 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5834 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5835 }
5836}
5837
Vladimir Markoca1e0382018-04-11 09:58:41 +00005838void CodeGeneratorARM64::GenerateGcRootFieldLoad(
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005839 HInstruction* instruction,
5840 Location root,
5841 Register obj,
5842 uint32_t offset,
5843 vixl::aarch64::Label* fixup_label,
5844 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005845 DCHECK(fixup_label == nullptr || offset == 0u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005846 Register root_reg = RegisterFrom(root, DataType::Type::kReference);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005847 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005848 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005849 if (kUseBakerReadBarrier) {
5850 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00005851 // Baker's read barriers are used.
Roland Levillain44015862016-01-22 11:47:17 +00005852
Vladimir Marko008e09f32018-08-06 15:42:43 +01005853 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
5854 // the Marking Register) to decide whether we need to enter
5855 // the slow path to mark the GC root.
5856 //
5857 // We use shared thunks for the slow path; shared within the method
5858 // for JIT, across methods for AOT. That thunk checks the reference
5859 // and jumps to the entrypoint if needed.
5860 //
5861 // lr = &return_address;
5862 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
5863 // if (mr) { // Thread::Current()->GetIsGcMarking()
5864 // goto gc_root_thunk<root_reg>(lr)
5865 // }
5866 // return_address:
Roland Levillainba650a42017-03-06 13:52:32 +00005867
Vladimir Marko008e09f32018-08-06 15:42:43 +01005868 UseScratchRegisterScope temps(GetVIXLAssembler());
5869 DCHECK(temps.IsAvailable(ip0));
5870 DCHECK(temps.IsAvailable(ip1));
5871 temps.Exclude(ip0, ip1);
5872 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());
Roland Levillain44015862016-01-22 11:47:17 +00005873
Vladimir Marko008e09f32018-08-06 15:42:43 +01005874 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
5875 vixl::aarch64::Label return_address;
5876 __ adr(lr, &return_address);
5877 if (fixup_label != nullptr) {
5878 __ bind(fixup_label);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005879 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01005880 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
Vladimir Marko94796f82018-08-08 15:15:33 +01005881 "GC root LDR must be 2 instructions (8B) before the return address label.");
Vladimir Marko008e09f32018-08-06 15:42:43 +01005882 __ ldr(root_reg, MemOperand(obj.X(), offset));
5883 EmitBakerReadBarrierCbnz(custom_data);
5884 __ bind(&return_address);
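      // The scope above thus emits exactly three instructions: the ADR materializing the return
      // address in LR, the root LDR itself, and the CBNZ on the marking register that gets
      // patched to branch to the thunk; this is what places the LDR 8 bytes before
      // `return_address`, as the static_assert requires.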
Roland Levillain44015862016-01-22 11:47:17 +00005885 } else {
5886 // GC root loaded through a slow path for read barriers other
5887 // than Baker's.
5888 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005889 if (fixup_label == nullptr) {
5890 __ Add(root_reg.X(), obj.X(), offset);
5891 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00005892 EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005893 }
Roland Levillain44015862016-01-22 11:47:17 +00005894 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00005895 GenerateReadBarrierForRootSlow(instruction, root, root);
Roland Levillain44015862016-01-22 11:47:17 +00005896 }
5897 } else {
5898 // Plain GC root load with no read barrier.
5899 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005900 if (fixup_label == nullptr) {
5901 __ Ldr(root_reg, MemOperand(obj, offset));
5902 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00005903 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005904 }
Roland Levillain44015862016-01-22 11:47:17 +00005905 // Note that GC roots are not affected by heap poisoning, thus we
5906 // do not have to unpoison `root_reg` here.
5907 }
Andreas Gampe3db70682018-12-26 15:12:03 -08005908 MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Roland Levillain44015862016-01-22 11:47:17 +00005909}
5910
Vladimir Marko94796f82018-08-08 15:15:33 +01005911void CodeGeneratorARM64::GenerateUnsafeCasOldValueMovWithBakerReadBarrier(
5912 vixl::aarch64::Register marked,
5913 vixl::aarch64::Register old_value) {
5914 DCHECK(kEmitCompilerReadBarrier);
5915 DCHECK(kUseBakerReadBarrier);
5916
5917 // Similar to the Baker RB path in GenerateGcRootFieldLoad(), with a MOV instead of LDR.
5918 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(marked.GetCode());
5919
5920 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
5921 vixl::aarch64::Label return_address;
5922 __ adr(lr, &return_address);
5923 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
5924 "GC root LDR must be 2 instructions (8B) before the return address label.");
5925 __ mov(marked, old_value);
5926 EmitBakerReadBarrierCbnz(custom_data);
5927 __ bind(&return_address);
5928}
5929
Roland Levillain44015862016-01-22 11:47:17 +00005930void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5931 Location ref,
Vladimir Marko248141f2018-08-10 10:40:07 +01005932 vixl::aarch64::Register obj,
5933 const vixl::aarch64::MemOperand& src,
Roland Levillain44015862016-01-22 11:47:17 +00005934 bool needs_null_check,
5935 bool use_load_acquire) {
5936 DCHECK(kEmitCompilerReadBarrier);
5937 DCHECK(kUseBakerReadBarrier);
5938
Vladimir Marko0ecac682018-08-07 10:40:38 +01005939 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
5940 // Marking Register) to decide whether we need to enter the slow
5941 // path to mark the reference. Then, in the slow path, check the
5942 // gray bit in the lock word of the reference's holder (`obj`) to
5943 // decide whether to mark `ref` or not.
5944 //
5945 // We use shared thunks for the slow path; shared within the method
5946 // for JIT, across methods for AOT. That thunk checks the holder
5947 // and jumps to the entrypoint if needed. If the holder is not gray,
5948 // it creates a fake dependency and returns to the LDR instruction.
5949 //
5950 // lr = &gray_return_address;
5951 // if (mr) { // Thread::Current()->GetIsGcMarking()
5952 // goto field_thunk<holder_reg, base_reg, use_load_acquire>(lr)
5953 // }
5954 // not_gray_return_address:
5955 // // Original reference load. If the offset is too large to fit
5956 // // into LDR, we use an adjusted base register here.
5957 // HeapReference<mirror::Object> reference = *(obj+offset);
5958 // gray_return_address:
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005959
Vladimir Marko248141f2018-08-10 10:40:07 +01005960 DCHECK(src.GetAddrMode() == vixl::aarch64::Offset);
5961 DCHECK_ALIGNED(src.GetOffset(), sizeof(mirror::HeapReference<mirror::Object>));
5962
5963 UseScratchRegisterScope temps(GetVIXLAssembler());
5964 DCHECK(temps.IsAvailable(ip0));
5965 DCHECK(temps.IsAvailable(ip1));
5966 temps.Exclude(ip0, ip1);
5967 uint32_t custom_data = use_load_acquire
5968 ? EncodeBakerReadBarrierAcquireData(src.GetBaseRegister().GetCode(), obj.GetCode())
5969 : EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode());
5970
5971 {
5972 ExactAssemblyScope guard(GetVIXLAssembler(),
5973 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
5974 vixl::aarch64::Label return_address;
5975 __ adr(lr, &return_address);
5976 EmitBakerReadBarrierCbnz(custom_data);
5977 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
5978 "Field LDR must be 1 instruction (4B) before the return address label; "
5979 " 2 instructions (8B) for heap poisoning.");
5980 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
5981 if (use_load_acquire) {
5982 DCHECK_EQ(src.GetOffset(), 0);
5983 __ ldar(ref_reg, src);
5984 } else {
5985 __ ldr(ref_reg, src);
5986 }
5987 if (needs_null_check) {
5988 MaybeRecordImplicitNullCheck(instruction);
5989 }
5990 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
5991 // macro instructions disallowed in ExactAssemblyScope.
5992 if (kPoisonHeapReferences) {
5993 __ neg(ref_reg, Operand(ref_reg));
5994 }
5995 __ bind(&return_address);
5996 }
Andreas Gampe3db70682018-12-26 15:12:03 -08005997 MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__, /* temp_loc= */ LocationFrom(ip1));
Vladimir Marko248141f2018-08-10 10:40:07 +01005998}
5999
6000void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6001 Location ref,
6002 Register obj,
6003 uint32_t offset,
6004 Location maybe_temp,
6005 bool needs_null_check,
6006 bool use_load_acquire) {
Vladimir Marko0ecac682018-08-07 10:40:38 +01006007 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
6008 Register base = obj;
6009 if (use_load_acquire) {
6010 DCHECK(maybe_temp.IsRegister());
6011 base = WRegisterFrom(maybe_temp);
6012 __ Add(base, obj, offset);
6013 offset = 0u;
6014 } else if (offset >= kReferenceLoadMinFarOffset) {
6015 DCHECK(maybe_temp.IsRegister());
6016 base = WRegisterFrom(maybe_temp);
6017 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
6018 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
6019 offset &= (kReferenceLoadMinFarOffset - 1u);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006020 }
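  // Worked example (assuming kReferenceLoadMinFarOffset is a power of two such as 16KB; see its
  // definition in the header for the actual value): an offset of 0x4123 becomes
  // base = obj + 0x4000 with a residual LDR offset of 0x123, so the load still uses the
  // LDR (immediate) form that the field thunk expects.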
Vladimir Marko248141f2018-08-10 10:40:07 +01006021 MemOperand src(base.X(), offset);
6022 GenerateFieldLoadWithBakerReadBarrier(
6023 instruction, ref, obj, src, needs_null_check, use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006024}
6025
Artem Serov0806f582018-10-11 20:14:20 +01006026void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HArrayGet* instruction,
6027 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006028 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006029 uint32_t data_offset,
6030 Location index,
Roland Levillain44015862016-01-22 11:47:17 +00006031 bool needs_null_check) {
6032 DCHECK(kEmitCompilerReadBarrier);
6033 DCHECK(kUseBakerReadBarrier);
6034
Vladimir Marko66d691d2017-04-07 17:53:39 +01006035 static_assert(
6036 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6037 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006038 size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);
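  // Per the static_assert above, scale_factor is 2: compressed heap references are 4 bytes, so
  // the reference load below addresses data[index] as temp + (index << 2).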
Vladimir Marko66d691d2017-04-07 17:53:39 +01006039
Vladimir Marko008e09f32018-08-06 15:42:43 +01006040 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6041 // Marking Register) to decide whether we need to enter the slow
6042 // path to mark the reference. Then, in the slow path, check the
6043 // gray bit in the lock word of the reference's holder (`obj`) to
6044 // decide whether to mark `ref` or not.
6045 //
6046 // We use shared thunks for the slow path; shared within the method
6047 // for JIT, across methods for AOT. That thunk checks the holder
6048 // and jumps to the entrypoint if needed. If the holder is not gray,
6049 // it creates a fake dependency and returns to the LDR instruction.
6050 //
6051 // lr = &gray_return_address;
6052 // if (mr) { // Thread::Current()->GetIsGcMarking()
6053 // goto array_thunk<base_reg>(lr)
6054 // }
6055 // not_gray_return_address:
6056 // // Original reference load. If the offset is too large to fit
6057 // // into LDR, we use an adjusted base register here.
6058 // HeapReference<mirror::Object> reference = data[index];
6059 // gray_return_address:
Vladimir Marko66d691d2017-04-07 17:53:39 +01006060
Vladimir Marko008e09f32018-08-06 15:42:43 +01006061 DCHECK(index.IsValid());
6062 Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
6063 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006064
Vladimir Marko008e09f32018-08-06 15:42:43 +01006065 UseScratchRegisterScope temps(GetVIXLAssembler());
6066 DCHECK(temps.IsAvailable(ip0));
6067 DCHECK(temps.IsAvailable(ip1));
6068 temps.Exclude(ip0, ip1);
Artem Serov0806f582018-10-11 20:14:20 +01006069
6070 Register temp;
6071 if (instruction->GetArray()->IsIntermediateAddress()) {
6072 // We do not need to compute the intermediate address from the array: the
6073 // input instruction has done it already. See the comment in
6074 // `TryExtractArrayAccessAddress()`.
6075 if (kIsDebugBuild) {
6076 HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
6077 DCHECK_EQ(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
6078 }
6079 temp = obj;
6080 } else {
6081 temp = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
6082 __ Add(temp.X(), obj.X(), Operand(data_offset));
6083 }
6084
Vladimir Marko008e09f32018-08-06 15:42:43 +01006085 uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());
Vladimir Marko66d691d2017-04-07 17:53:39 +01006086
Vladimir Marko008e09f32018-08-06 15:42:43 +01006087 {
6088 ExactAssemblyScope guard(GetVIXLAssembler(),
6089 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6090 vixl::aarch64::Label return_address;
6091 __ adr(lr, &return_address);
6092 EmitBakerReadBarrierCbnz(custom_data);
6093 static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6094 "Array LDR must be 1 instruction (4B) before the return address label; "
6095 " 2 instructions (8B) for heap poisoning.");
6096 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
6097 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
6098 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
6099 // macro instructions disallowed in ExactAssemblyScope.
6100 if (kPoisonHeapReferences) {
6101 __ neg(ref_reg, Operand(ref_reg));
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006102 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006103 __ bind(&return_address);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006104 }
Andreas Gampe3db70682018-12-26 15:12:03 -08006105 MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__, /* temp_loc= */ LocationFrom(ip1));
Roland Levillain44015862016-01-22 11:47:17 +00006106}
6107
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006108void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
6109 // The following condition is a compile-time one, so it does not have a run-time cost.
6110 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
6111 // The following condition is a run-time one; it is executed after the
6112 // previous compile-time test, to avoid penalizing non-debug builds.
6113 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
6114 UseScratchRegisterScope temps(GetVIXLAssembler());
6115 Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
6116 GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
6117 }
6118 }
6119}
6120
Roland Levillain44015862016-01-22 11:47:17 +00006121void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
6122 Location out,
6123 Location ref,
6124 Location obj,
6125 uint32_t offset,
6126 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006127 DCHECK(kEmitCompilerReadBarrier);
6128
Roland Levillain44015862016-01-22 11:47:17 +00006129 // Insert a slow path based read barrier *after* the reference load.
6130 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006131 // If heap poisoning is enabled, the unpoisoning of the loaded
6132 // reference will be carried out by the runtime within the slow
6133 // path.
6134 //
6135 // Note that `ref` currently does not get unpoisoned (when heap
6136 // poisoning is enabled), which is alright as the `ref` argument is
6137 // not used by the artReadBarrierSlow entry point.
6138 //
6139 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006140 SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006141 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
6142 AddSlowPath(slow_path);
6143
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006144 __ B(slow_path->GetEntryLabel());
6145 __ Bind(slow_path->GetExitLabel());
6146}
6147
Roland Levillain44015862016-01-22 11:47:17 +00006148void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6149 Location out,
6150 Location ref,
6151 Location obj,
6152 uint32_t offset,
6153 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006154 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00006155 // Baker's read barriers shall be handled by the fast path
6156 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
6157 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006158 // If heap poisoning is enabled, unpoisoning will be taken care of
6159 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00006160 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006161 } else if (kPoisonHeapReferences) {
6162 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
6163 }
6164}
6165
Roland Levillain44015862016-01-22 11:47:17 +00006166void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6167 Location out,
6168 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006169 DCHECK(kEmitCompilerReadBarrier);
6170
Roland Levillain44015862016-01-22 11:47:17 +00006171 // Insert a slow path based read barrier *after* the GC root load.
6172 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006173 // Note that GC roots are not affected by heap poisoning, so we do
6174 // not need to do anything special for this here.
6175 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006176 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006177 AddSlowPath(slow_path);
6178
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006179 __ B(slow_path->GetEntryLabel());
6180 __ Bind(slow_path->GetExitLabel());
6181}
6182
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006183void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
6184 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006185 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006186 locations->SetInAt(0, Location::RequiresRegister());
6187 locations->SetOut(Location::RequiresRegister());
6188}
6189
6190void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
6191 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00006192 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006193 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006194 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006195 __ Ldr(XRegisterFrom(locations->Out()),
6196 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006197 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006198 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006199 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006200 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
6201 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006202 __ Ldr(XRegisterFrom(locations->Out()),
6203 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006204 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006205}
6206
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006207static void PatchJitRootUse(uint8_t* code,
6208 const uint8_t* roots_data,
6209 vixl::aarch64::Literal<uint32_t>* literal,
6210 uint64_t index_in_table) {
6211 uint32_t literal_offset = literal->GetOffset();
6212 uintptr_t address =
6213 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6214 uint8_t* data = code + literal_offset;
6215 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
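  // Example: with index_in_table == 3 the 32-bit literal receives
  // roots_data + 3 * sizeof(GcRoot<mirror::Object>), i.e. roots_data + 12 for 4-byte compressed
  // GC roots; the dchecked cast verifies that the address fits in 32 bits.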
6216}
6217
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006218void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
6219 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006220 const StringReference& string_reference = entry.first;
6221 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006222 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006223 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006224 }
6225 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006226 const TypeReference& type_reference = entry.first;
6227 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006228 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006229 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006230 }
6231}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006232
Alexandre Rames67555f72014-11-18 10:55:16 +00006233#undef __
6234#undef QUICK_ENTRY_POINT
6235
Vladimir Markoca1e0382018-04-11 09:58:41 +00006236#define __ assembler.GetVIXLAssembler()->
6237
6238static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
6239 vixl::aarch64::Register base_reg,
6240 vixl::aarch64::MemOperand& lock_word,
Vladimir Marko7a695052018-04-12 10:26:50 +01006241 vixl::aarch64::Label* slow_path,
6242 vixl::aarch64::Label* throw_npe = nullptr) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006243 // Load the lock word containing the rb_state.
6244 __ Ldr(ip0.W(), lock_word);
6245 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01006246 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Vladimir Markoca1e0382018-04-11 09:58:41 +00006247 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6248 __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
6249 static_assert(
6250 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
6251 "Field and array LDR offsets must be the same to reuse the same code.");
Vladimir Marko7a695052018-04-12 10:26:50 +01006252 // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
6253 if (throw_npe != nullptr) {
6254 __ Bind(throw_npe);
6255 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006256 // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
6257 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6258 "Field LDR must be 1 instruction (4B) before the return address label; "
6259 " 2 instructions (8B) for heap poisoning.");
6260 __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6261 // Introduce a dependency on the lock_word including rb_state,
6262 // to prevent load-load reordering, and without using
6263 // a memory barrier (which would be more expensive).
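  // The added operand is always zero: the 32-bit LDR above zero-extends ip0, so shifting the
  // X register right by 32 yields 0, leaving base_reg unchanged while still creating an
  // address dependency on the lock word load.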
6264 __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
6265 __ Br(lr); // And return back to the function.
6266 // Note: The fake dependency is unnecessary for the slow path.
6267}
6268
6269// Load the read barrier introspection entrypoint in register `entrypoint`.
6270static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
6271 vixl::aarch64::Register entrypoint) {
6272 // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
6273 DCHECK_EQ(ip0.GetCode(), 16u);
6274 const int32_t entry_point_offset =
6275 Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
6276 __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
6277}
6278
6279void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
6280 uint32_t encoded_data,
6281 /*out*/ std::string* debug_name) {
6282 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
6283 switch (kind) {
Vladimir Marko0ecac682018-08-07 10:40:38 +01006284 case BakerReadBarrierKind::kField:
6285 case BakerReadBarrierKind::kAcquire: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006286 auto base_reg =
6287 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6288 CheckValidReg(base_reg.GetCode());
6289 auto holder_reg =
6290 Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
6291 CheckValidReg(holder_reg.GetCode());
6292 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6293 temps.Exclude(ip0, ip1);
Vladimir Marko7a695052018-04-12 10:26:50 +01006294 // If base_reg differs from holder_reg, the offset was too large and we must have emitted
6295 // an explicit null check before the load. Otherwise, for implicit null checks, we need to
6296 // null-check the holder as we do not necessarily do that check before going to the thunk.
6297 vixl::aarch64::Label throw_npe_label;
6298 vixl::aarch64::Label* throw_npe = nullptr;
6299 if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
6300 throw_npe = &throw_npe_label;
6301 __ Cbz(holder_reg.W(), throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006302 }
Vladimir Marko7a695052018-04-12 10:26:50 +01006303 // Check if the holder is gray and, if not, add fake dependency to the base register
6304 // and return to the LDR instruction to load the reference. Otherwise, use introspection
6305 // to load the reference and call the entrypoint that performs further checks on the
6306 // reference and marks it if needed.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006307 vixl::aarch64::Label slow_path;
6308 MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
Vladimir Marko7a695052018-04-12 10:26:50 +01006309 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006310 __ Bind(&slow_path);
Vladimir Marko0ecac682018-08-07 10:40:38 +01006311 if (kind == BakerReadBarrierKind::kField) {
6312 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6313 __ Ldr(ip0.W(), ldr_address); // Load the LDR (immediate) unsigned offset.
6314 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6315 __ Ubfx(ip0.W(), ip0.W(), 10, 12); // Extract the offset.
6316 __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2)); // Load the reference.
6317 } else {
6318 DCHECK(kind == BakerReadBarrierKind::kAcquire);
6319 DCHECK(!base_reg.Is(holder_reg));
6320 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6321 __ Ldar(ip0.W(), MemOperand(base_reg));
6322 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006323 // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
6324 __ Br(ip1); // Jump to the entrypoint.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006325 break;
6326 }
6327 case BakerReadBarrierKind::kArray: {
6328 auto base_reg =
6329 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6330 CheckValidReg(base_reg.GetCode());
6331 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6332 BakerReadBarrierSecondRegField::Decode(encoded_data));
6333 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6334 temps.Exclude(ip0, ip1);
6335 vixl::aarch64::Label slow_path;
6336 int32_t data_offset =
6337 mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
6338 MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
6339 DCHECK_LT(lock_word.GetOffset(), 0);
6340 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
6341 __ Bind(&slow_path);
6342 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
6343 __ Ldr(ip0.W(), ldr_address); // Load the LDR (register) unsigned offset.
6344 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6345 __ Ubfx(ip0, ip0, 16, 6); // Extract the index register, plus 32 (bit 21 is set).
6346 __ Bfi(ip1, ip0, 3, 6); // Insert ip0 to the entrypoint address to create
6347 // a switch case target based on the index register.
6348 __ Mov(ip0, base_reg); // Move the base register to ip0.
6349 __ Br(ip1); // Jump to the entrypoint's array switch case.
6350 break;
6351 }
6352 case BakerReadBarrierKind::kGcRoot: {
6353 // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
6354 // and it does not have a forwarding address), call the correct introspection entrypoint;
6355 // otherwise return the reference (or the extracted forwarding address).
6356 // There is no gray bit check for GC roots.
6357 auto root_reg =
6358 Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6359 CheckValidReg(root_reg.GetCode());
6360 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6361 BakerReadBarrierSecondRegField::Decode(encoded_data));
6362 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6363 temps.Exclude(ip0, ip1);
6364 vixl::aarch64::Label return_label, not_marked, forwarding_address;
6365 __ Cbz(root_reg, &return_label);
6366 MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
6367 __ Ldr(ip0.W(), lock_word);
6368 __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
6369 __ Bind(&return_label);
6370 __ Br(lr);
6371 __ Bind(&not_marked);
6372 __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
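      // N is set iff both top bits of the lock word are set, i.e. the lock word holds a
      // forwarding address; the address is decoded below by shifting left by
      // LockWord::kForwardingAddressShift.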
6373 __ B(&forwarding_address, mi);
6374 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6375 // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
6376 // art_quick_read_barrier_mark_introspection_gc_roots.
6377 __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
6378 __ Mov(ip0.W(), root_reg);
6379 __ Br(ip1);
6380 __ Bind(&forwarding_address);
6381 __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
6382 __ Br(lr);
6383 break;
6384 }
6385 default:
6386 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
6387 UNREACHABLE();
6388 }
6389
Vladimir Marko966b46f2018-08-03 10:20:19 +00006390 // For JIT, the slow path is considered part of the compiled method,
6391 // so JIT should pass null as `debug_name`. Tests may not have a runtime.
6392 DCHECK(Runtime::Current() == nullptr ||
6393 !Runtime::Current()->UseJitCompilation() ||
6394 debug_name == nullptr);
6395 if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006396 std::ostringstream oss;
6397 oss << "BakerReadBarrierThunk";
6398 switch (kind) {
6399 case BakerReadBarrierKind::kField:
6400 oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
6401 << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
6402 break;
Vladimir Marko0ecac682018-08-07 10:40:38 +01006403 case BakerReadBarrierKind::kAcquire:
6404 oss << "Acquire_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
6405 << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
6406 break;
Vladimir Markoca1e0382018-04-11 09:58:41 +00006407 case BakerReadBarrierKind::kArray:
6408 oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
6409 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6410 BakerReadBarrierSecondRegField::Decode(encoded_data));
6411 break;
6412 case BakerReadBarrierKind::kGcRoot:
6413 oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
6414 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6415 BakerReadBarrierSecondRegField::Decode(encoded_data));
6416 break;
6417 }
6418 *debug_name = oss.str();
6419 }
6420}
6421
6422#undef __
6423
Alexandre Rames5319def2014-10-23 10:03:10 +01006424} // namespace arm64
6425} // namespace art