/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method-inl.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
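// (Illustrative arithmetic, not in the original source: at num_entries == 7 the compare/jump
// sequence costs about 1.5 * 7 + 3 = ~13 instructions, while the jump table costs 7 instructions
// plus 7 int32 literals, i.e. about 14 words; beyond this point the table's fixed overhead is
// amortized and it wins.)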

// A reference load (except object array loads) uses LDR Wt, [Xn, #offset], which can handle
// offsets < 16KiB. For offsets >= 16KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks, we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;
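// (Illustrative sketch, not in the original source: a far load at `offset` >= 16KiB could be
// split as
//   __ Add(temp, base, offset & ~(kReferenceLoadMinFarOffset - 1));
//   __ Ldr(dest, MemOperand(temp, offset & (kReferenceLoadMinFarOffset - 1)));
// keeping the LDR immediate within its encodable range.)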

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
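
// (Explanatory example, not in the original source: for a float comparison `a < b` with gt_bias,
// NaN operands must make the condition false, so kCondLT maps to `cc`, which reads false when
// FCMP sets the unordered flags (C == 1); without gt_bias it maps to `lt`, which reads true for
// unordered operands.)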

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}
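
// (Explanatory note, not in the original source: per the mapping above,
// ARM64ReturnLocation(DataType::Type::kInt32) yields LocationFrom(w0) and
// ARM64ReturnLocation(DataType::Type::kFloat64) yields LocationFrom(d0).)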

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operand used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
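
// (Illustrative layout, not in the original source: the helper stores the core registers named in
// `core_spills` contiguously at [base, #spill_offset] and the FP registers right after them, so
// spills {x0, x1, d0} would produce roughly:
//   stp x0, x1, [sp, #spill_offset]
//   str d0, [sp, #(spill_offset + 16)]
// with loads instead of stores on the restore path.)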

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const size_t fp_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSizeInBytes : kDRegSizeInBytes;
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers= */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += fp_reg_size;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), /* is_save= */ true);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), /* is_save= */ false);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
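
// (Explanatory note, not in the original source: the table emitted above is just num_entries
// consecutive int32 words, each holding the signed offset of a successor block from
// `table_start_`; the packed-switch code loads the selected word and adds it to the table
// address to form the branch target.)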

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
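        // (Illustrative check, not in the original source: the largest scaled value is
        // (2^26 - 1) << 2, slightly under 2^28, and adding `offset_` keeps the result far
        // below 2^31, so it still fits in the 32-bit `index_reg`.)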
        __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`DataType::Type::kReference`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
  return next_location;
}
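
// (Worked example, not in the original source: floating-point arguments consume successive FP
// argument registers while any remain, and integral/reference arguments consume successive core
// argument registers, so with eight FP argument registers a ninth float argument falls through
// to a stack slot; `stack_index_` advances for every argument regardless, because stack space is
// reserved for all of them.)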

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(),
                 compiler_options.GetInstructionSetFeatures()->AsArm64InstructionSetFeatures()),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
                                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}
Alexandre Rames5319def2014-10-23 10:03:10 +0100913
Alexandre Rames67555f72014-11-18 10:55:16 +0000914#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100915
Zheng Xu3927c8b2015-11-18 17:46:25 +0800916void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +0100917 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800918 jump_table->EmitTable(this);
919 }
920}
921
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000922void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800923 EmitJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +0000924
925 // Emit JIT baker read barrier slow paths.
926 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
927 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
928 uint32_t encoded_data = entry.first;
929 vixl::aarch64::Label* slow_path_entry = &entry.second.label;
930 __ Bind(slow_path_entry);
Andreas Gampe3db70682018-12-26 15:12:03 -0800931 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);
Vladimir Marko966b46f2018-08-03 10:20:19 +0000932 }
933
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000934 // Ensure we emit the literal pool.
935 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +0000936
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000937 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000938
939 // Verify Baker read barrier linker patches.
940 if (kIsDebugBuild) {
941 ArrayRef<const uint8_t> code = allocator->GetMemory();
942 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
943 DCHECK(info.label.IsBound());
944 uint32_t literal_offset = info.label.GetLocation();
945 DCHECK_ALIGNED(literal_offset, 4u);
946
947 auto GetInsn = [&code](uint32_t offset) {
948 DCHECK_ALIGNED(offset, 4u);
949 return
950 (static_cast<uint32_t>(code[offset + 0]) << 0) +
951 (static_cast<uint32_t>(code[offset + 1]) << 8) +
952 (static_cast<uint32_t>(code[offset + 2]) << 16)+
953 (static_cast<uint32_t>(code[offset + 3]) << 24);
954 };
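      // Illustrative decode (bytes assumed, not from a real compilation): the
      // little-endian bytes {0x20, 0x00, 0x40, 0xb9} reassemble to 0xb9400020,
      // i.e. "ldr w0, [x1]" - exactly the LDR (immediate) shape that the
      // kField check below matches with mask 0xffc003e0.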

      const uint32_t encoded_data = info.custom_data;
      BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
      // Check that the next instruction matches the expected LDR.
      switch (kind) {
        case BakerReadBarrierKind::kField:
        case BakerReadBarrierKind::kAcquire: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          if (kind == BakerReadBarrierKind::kField) {
            // LDR (immediate) with correct base_reg.
            CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
          } else {
            DCHECK(kind == BakerReadBarrierKind::kAcquire);
            // LDAR with correct base_reg.
            CHECK_EQ(next_insn & 0xffffffe0u, 0x88dffc00u | (base_reg << 5));
          }
          break;
        }
        case BakerReadBarrierKind::kArray: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
          // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
          CheckValidReg((next_insn >> 16) & 0x1f);  // Check index register.
          break;
        }
        case BakerReadBarrierKind::kGcRoot: {
          DCHECK_GE(literal_offset, 4u);
          uint32_t prev_insn = GetInsn(literal_offset - 4u);
          const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          // Usually LDR (immediate) with correct root_reg but
          // we may have a "MOV marked, old_value" for UnsafeCASObject.
          if ((prev_insn & 0xffe0ffff) != (0x2a0003e0 | root_reg)) {  // MOV?
            CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);  // LDR?
          }
          break;
        }
        default:
          LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
          UNREACHABLE();
      }
    }
  }
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
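  // Illustrative sketch (register names assumed): a GPR swap cycle such as
  // "x0 <-> x1" is an instance of case 3 and is broken with one scratch:
  //   mov x16, x0
  //   mov x0, x1
  //   mov x1, x16
  // where x16 (ip0) comes from the VIXL temp pool opened below.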
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
         || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
         || kind == Location::kSIMDStackSlot);
  kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
      ? Location::kFpuRegister
      : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK_EQ(kind, Location::kFpuRegister);
    scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
        ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
        : vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
}

void CodeGeneratorARM64::MaybeIncrementHotness(bool is_frame_entry) {
  MacroAssembler* masm = GetVIXLAssembler();
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    UseScratchRegisterScope temps(masm);
    Register counter = temps.AcquireX();
    Register method = is_frame_entry ? kArtMethodRegister : temps.AcquireX();
    if (!is_frame_entry) {
      __ Ldr(method, MemOperand(sp, 0));
    }
    __ Ldrh(counter, MemOperand(method, ArtMethod::HotnessCountOffset().Int32Value()));
    __ Add(counter, counter, 1);
    // Subtract one if the counter would overflow.
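    // Worked example (values assumed): if the counter was 0xffff, the Add
    // above produced 0x10000; 0x10000 LSR 16 == 1, so the Sub below restores
    // 0xffff. For any value below 0x10000 the shifted term is 0 and the Sub
    // is a no-op, so the 16-bit counter saturates instead of wrapping to 0.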
    __ Sub(counter, counter, Operand(counter, LSR, 16));
    __ Strh(counter, MemOperand(method, ArtMethod::HotnessCountOffset().Int32Value()));
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    uint32_t address = reinterpret_cast32<uint32_t>(info);
    vixl::aarch64::Label done;
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    Register counter = temps.AcquireW();
    __ Mov(temp, address);
    __ Ldrh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
    __ Add(counter, counter, 1);
    __ Strh(counter, MemOperand(temp, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
    __ Tst(counter, 0xffff);
    __ B(ne, &done);
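    // Worked example (values assumed): once the stored 16-bit count reaches
    // 0xffff, the next increment leaves 0x10000 in the 32-bit scratch, whose
    // low 16 bits are zero, so the Tst/B(ne) above falls through into the
    // kQuickCompileOptimized call below; any other count branches to `done`.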
    if (is_frame_entry) {
      if (HasEmptyFrame()) {
        // The entrypoint expects the method at the bottom of the stack. We
        // claim the stack space necessary for alignment.
        __ Claim(kStackAlignment);
        __ Stp(kArtMethodRegister, lr, MemOperand(sp, 0));
      } else if (!RequiresCurrentMethod()) {
        __ Str(kArtMethodRegister, MemOperand(sp, 0));
      }
    } else {
      CHECK(RequiresCurrentMethod());
    }
    uint32_t entrypoint_offset =
        GetThreadOffset<kArm64PointerSize>(kQuickCompileOptimized).Int32Value();
    __ Ldr(lr, MemOperand(tr, entrypoint_offset));
    // Note: we don't record the call here (and therefore don't generate a stack
    // map), as the entrypoint should never be suspended.
    __ Blr(lr);
    if (HasEmptyFrame()) {
      CHECK(is_frame_entry);
      __ Ldr(lr, MemOperand(sp, 8));
      __ Drop(kStackAlignment);
    }
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Bind(&frame_entry_label_);

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
    {
      // Ensure that between load and RecordPcInfo there are no pools emitted.
      ExactAssemblyScope eas(GetVIXLAssembler(),
                             kInstructionSize,
                             CodeBufferCheckScope::kExactSize);
      __ ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }

  if (!HasEmptyFrame()) {
    // Stack layout:
    // sp[frame_size - 8] : lr.
    // ...                : other preserved core registers.
    // ...                : other preserved fp registers.
    // ...                : reserved frame space.
    // sp[0]              : current method.
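    // Worked example (numbers assumed): with frame_size == 96, two preserved
    // core registers (say x20, x21) plus lr, and no preserved FP registers,
    // GetCoreSpillSize() == 24, so the core spills live at sp[72..95] with lr
    // at sp[88], the method at sp[0], and sp[8..71] left as reserved space.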
    int32_t frame_size = dchecked_integral_cast<int32_t>(GetFrameSize());
    uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
    CPURegList preserved_core_registers = GetFramePreservedCoreRegisters();
    DCHECK(!preserved_core_registers.IsEmpty());
    uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
    CPURegList preserved_fp_registers = GetFramePreservedFPRegisters();

    // Save the current method if we need it, or if using STP reduces code
    // size. Note that we do not do this in HCurrentMethod, as the
    // instruction might have been removed in the SSA graph.
    CPURegister lowest_spill;
    if (core_spills_offset == kXRegSizeInBytes) {
      // If there is no gap between the method and the lowest core spill, use
      // aligned STP pre-index to store both. Max difference is 512. We do
      // that to reduce code size even if we do not have to save the method.
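      // Illustrative sketch (frame size and spill set assumed): with
      // frame_size == 32 and x20 as the lowest core spill, this emits a single
      //   stp x0, x20, [sp, #-32]!
      // (kArtMethodRegister is x0), instead of a separate pre-indexed str for
      // the method followed by another str for x20.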
      DCHECK_LE(frame_size, 512);  // 32 core registers are only 256 bytes.
      lowest_spill = preserved_core_registers.PopLowestIndex();
      __ Stp(kArtMethodRegister, lowest_spill, MemOperand(sp, -frame_size, PreIndex));
    } else if (RequiresCurrentMethod()) {
      __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    } else {
      __ Claim(frame_size);
    }
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    if (lowest_spill.IsValid()) {
      GetAssembler()->cfi().RelOffset(DWARFReg(lowest_spill), core_spills_offset);
      core_spills_offset += kXRegSizeInBytes;
    }
    GetAssembler()->SpillRegisters(preserved_core_registers, core_spills_offset);
    GetAssembler()->SpillRegisters(preserved_fp_registers, fp_spills_offset);

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      // Initialize should_deoptimize flag to 0.
      Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
      __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
    }
  }
  MaybeIncrementHotness(/* is_frame_entry= */ true);
  MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void CodeGeneratorARM64::GenerateFrameExit() {
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int32_t frame_size = dchecked_integral_cast<int32_t>(GetFrameSize());
    uint32_t core_spills_offset = frame_size - GetCoreSpillSize();
    CPURegList preserved_core_registers = GetFramePreservedCoreRegisters();
    DCHECK(!preserved_core_registers.IsEmpty());
    uint32_t fp_spills_offset = frame_size - FrameEntrySpillSize();
    CPURegList preserved_fp_registers = GetFramePreservedFPRegisters();

    CPURegister lowest_spill;
    if (core_spills_offset == kXRegSizeInBytes) {
      // If there is no gap between the method and the lowest core spill, use
      // aligned LDP pre-index to pop both. Max difference is 504. We do
      // that to reduce code size even though the loaded method is unused.
      DCHECK_LE(frame_size, 504);  // 32 core registers are only 256 bytes.
      lowest_spill = preserved_core_registers.PopLowestIndex();
      core_spills_offset += kXRegSizeInBytes;
    }
    GetAssembler()->UnspillRegisters(preserved_fp_registers, fp_spills_offset);
    GetAssembler()->UnspillRegisters(preserved_core_registers, core_spills_offset);
    if (lowest_spill.IsValid()) {
      __ Ldp(xzr, lowest_spill, MemOperand(sp, frame_size, PostIndex));
      GetAssembler()->cfi().Restore(DWARFReg(lowest_spill));
    } else {
      __ Drop(frame_size);
    }
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return CPURegList(CPURegister::kRegister, kXRegSize, core_spill_mask_);
}

CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return CPURegList(CPURegister::kVRegister, kDRegSize, fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::aarch64::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  // Load the address of the card table into `card`.
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
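  // Worked example (addresses assumed; kCardShift == 10 and kCardDirty == 0x70
  // in current ART): for an object at 0x12345400, temp = 0x12345400 >> 10 =
  // 0x48d15, and the Strb below stores the low byte of `card` (0x70, thanks to
  // the bias) at address card + 0x48d15, dirtying exactly that object's card.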
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //   lr  : Runtime reserved.
  //   tr  : Runtime reserved.
  //   mr  : Runtime reserved.
  //   ip1 : VIXL core temp.
  //   ip0 : VIXL core temp.
  //   x18 : Platform register.
  //
  // Blocked fp registers:
  //   d31 : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
  }
  blocked_core_registers_[X18] = true;

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                     uint32_t reg_id ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "FP registers shouldn't be saved/restored individually, "
             << "use SaveRestoreLiveRegistersHelper";
  UNREACHABLE();
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
                                                        uint32_t reg_id ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "FP registers shouldn't be saved/restored individually, "
             << "use SaveRestoreLiveRegistersHelper";
  UNREACHABLE();
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(VRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(VRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, DataType::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
         (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
         (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
         (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
}

// Allocate a scratch register from the VIXL pool, querying first
// the floating-point register pool, and then the core register
// pool. This is essentially a reimplementation of
// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
// using a different allocation strategy.
static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
                                                    vixl::aarch64::UseScratchRegisterScope* temps,
                                                    int size_in_bits) {
  return masm->GetScratchVRegisterList()->IsEmpty()
      ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
      : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsSIMDStackSlot()) {
      __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kInt64
            : DataType::Type::kInt32;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kFloat64
            : DataType::Type::kFloat32;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        if (GetGraph()->HasSIMD()) {
          __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
        } else {
          __ Fmov(VRegister(dst), FPRegisterFrom(source, dst_type));
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ Str(QRegisterFrom(source), StackOperandFrom(destination));
    } else {
      DCHECK(source.IsSIMDStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      if (GetVIXLAssembler()->GetScratchVRegisterList()->IsEmpty()) {
        Register temp = temps.AcquireX();
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
      } else {
        VRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
        __ Ldr(temp, StackOperandFrom(source));
        __ Str(temp, StackOperandFrom(destination));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsZeroBitPattern()) {
        temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
            ? Register(xzr)
            : Register(wzr);
      } else {
        if (src_cst->IsIntConstant()) {
          temp = temps.AcquireW();
        } else if (src_cst->IsLongConstant()) {
          temp = temps.AcquireX();
        } else if (src_cst->IsFloatConstant()) {
          temp = temps.AcquireS();
        } else {
          DCHECK(src_cst->IsDoubleConstant());
          temp = temps.AcquireD();
        }
        MoveConstant(temp, src_cst);
      }
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // Use any scratch register (a core or a floating-point one)
      // from the VIXL scratch register pools as a temporary.
      //
      // We used to use only the FP scratch register pool, but in some
      // rare cases the only register from this pool (D31) would
      // already be used (e.g. within a ParallelMove instruction, when
      // a move is blocked by another move requiring a scratch FP
      // register, which would reserve D31). To prevent this issue, we
      // ask for a scratch register of any type (core or FP).
      //
      // Also, we ask for an FP scratch register first, as the
      // demand for scratch core registers is higher. This is why we
      // use AcquireFPOrCoreCPURegisterOfSize instead of
      // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
      // allocates core scratch registers first.
      CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
          GetVIXLAssembler(),
          &temps,
          (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(DataType::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      __ Ldrb(Register(dst), src);
      break;
    case DataType::Type::kInt8:
      __ Ldrsb(Register(dst), src);
      break;
    case DataType::Type::kUint16:
      __ Ldrh(Register(dst), src);
      break;
    case DataType::Type::kInt16:
      __ Ldrsh(Register(dst), src);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  DataType::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    MemOperand base = MemOperand(temp_base);
    switch (type) {
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarb(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        if (type == DataType::Type::kInt8) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarh(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        if (type == DataType::Type::kInt16) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kReference:
      case DataType::Type::kInt64:
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64: {
        DCHECK(dst.IsFPRegister());
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));

        Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(temp, base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        __ Fmov(VRegister(dst), temp);
        break;
      }
      case DataType::Type::kUint32:
      case DataType::Type::kUint64:
      case DataType::Type::kVoid:
        LOG(FATAL) << "Unreachable type " << type;
    }
  }
}

void CodeGeneratorARM64::Store(DataType::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      __ Strb(Register(src), dst);
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      __ Strh(Register(src), dst);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      __ Str(src, dst);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
                                      DataType::Type type,
                                      CPURegister src,
                                      const MemOperand& dst,
                                      bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.GetBaseRegister(), op);
  MemOperand base = MemOperand(temp_base);
  // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrb(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrh(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      Register temp_src;
      if (src.IsZero()) {
        // The zero register is used to avoid synthesizing zero constants.
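        // Illustrative sketch (constant assumed): a volatile store of the
        // double constant 0.0 reaches here with src == xzr, so we emit
        // "stlr xzr, [base]" directly rather than materializing 0.0 in an FP
        // register and copying it through a core register first.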
        temp_src = Register(src);
      } else {
        DCHECK(src.IsFPRegister());
        temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        __ Fmov(temp_src, VRegister(src));
      }
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(temp_src, base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    }
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);

  ThreadOffset64 entrypoint_offset = GetThreadOffset<kArm64PointerSize>(entrypoint);
  // Reduce code size for AOT by using shared trampolines for slow path runtime calls across the
  // entire oat file. This adds an extra branch and we do not want to slow down the main path.
  // For JIT, thunk sharing is per-method, so the gains would be smaller or even negative.
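  // Shape of the two paths below (sketch, mnemonics only): the direct call
  // emits "ldr lr, [tr, #entrypoint_offset]; blr lr", while the AOT slow-path
  // case emits a single "bl" to a shared per-oat-file thunk that forwards to
  // the same entrypoint.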
  if (slow_path == nullptr || Runtime::Current()->UseJitCompilation()) {
    __ Ldr(lr, MemOperand(tr, entrypoint_offset.Int32Value()));
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    __ blr(lr);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  } else {
    // Ensure the pc position is recorded immediately after the `bl` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    EmitEntrypointThunkCall(entrypoint_offset);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  }
}

void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                             HInstruction* instruction,
                                                             SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  // CMP (immediate) is limited to imm12 or imm12<<12, so we would need to materialize
  // the constant 0xf0000000 for comparison with the full 32-bit field. To reduce the code
  // size, load only the high byte of the field and compare with 0xf0.
  // Note: The same code size could be achieved with LDR+MVN(asr #24)+CBNZ, but benchmarks
  // show that this pattern is slower (tested on little cores).
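  // Worked example (encoding assumed per the comment above): for a visibly
  // initialized class the loaded status byte equals 0xf0, Cmp sets "equal",
  // and the B(lo) below falls through; any smaller value (a class still being
  // initialized) takes the slow path.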
1821 __ Ldrb(temp, HeapOperand(class_reg, status_byte_offset));
1822 __ Cmp(temp, shifted_visibly_initialized_value);
1823 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001824 __ Bind(slow_path->GetExitLabel());
1825}
Alexandre Rames5319def2014-10-23 10:03:10 +01001826
Vladimir Marko175e7862018-03-27 09:03:13 +00001827void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
1828 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
1829 uint32_t path_to_root = check->GetBitstringPathToRoot();
1830 uint32_t mask = check->GetBitstringMask();
1831 DCHECK(IsPowerOfTwo(mask + 1));
1832 size_t mask_bits = WhichPowerOf2(mask + 1);
1833
1834 if (mask_bits == 16u) {
1835 // Load only the bitstring part of the status word.
1836 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1837 } else {
1838 // /* uint32_t */ temp = temp->status_
1839 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1840 // Extract the bitstring bits.
1841 __ Ubfx(temp, temp, 0, mask_bits);
1842 }
1843 // Compare the bitstring bits to `path_to_root`.
1844 __ Cmp(temp, path_to_root);
1845}
1846
Roland Levillain44015862016-01-22 11:47:17 +00001847void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001848 BarrierType type = BarrierAll;
1849
1850 switch (kind) {
1851 case MemBarrierKind::kAnyAny:
1852 case MemBarrierKind::kAnyStore: {
1853 type = BarrierAll;
1854 break;
1855 }
1856 case MemBarrierKind::kLoadAny: {
1857 type = BarrierReads;
1858 break;
1859 }
1860 case MemBarrierKind::kStoreStore: {
1861 type = BarrierWrites;
1862 break;
1863 }
1864 default:
1865 LOG(FATAL) << "Unexpected memory barrier " << kind;
1866 }
1867 __ Dmb(InnerShareable, type);
1868}
1869
Serban Constantinescu02164b32014-11-13 14:05:07 +00001870void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1871 HBasicBlock* successor) {
1872 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001873 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1874 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001875 slow_path =
1876 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001877 instruction->SetSlowPath(slow_path);
1878 codegen_->AddSlowPath(slow_path);
1879 if (successor != nullptr) {
1880 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001881 }
1882 } else {
1883 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1884 }
1885
Serban Constantinescu02164b32014-11-13 14:05:07 +00001886 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1887 Register temp = temps.AcquireW();
1888
Andreas Gampe542451c2016-07-26 09:02:02 -07001889 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001890 if (successor == nullptr) {
1891 __ Cbnz(temp, slow_path->GetEntryLabel());
1892 __ Bind(slow_path->GetReturnLabel());
1893 } else {
1894 __ Cbz(temp, codegen_->GetLabelOf(successor));
1895 __ B(slow_path->GetEntryLabel());
1896 // slow_path will return to GetLabelOf(successor).
1897 }
1898}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
                                           const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    // We need a temporary register for the read barrier load in
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
    // only if the field is volatile or the offset is too big.
    if (field_info.IsVolatile() ||
        field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
      locations->AddTemp(FixedTempLocation());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
      load_type == DataType::Type::kReference) {
    // Object FieldGet with Baker's read barrier case.
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, DataType::Type::kReference);
    Location maybe_temp =
        (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        maybe_temp,
        /* needs_null_check= */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check= */ true);
    } else {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(load_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (load_type == DataType::Type::kReference) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}
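
// For the volatile case above, LoadAcquire boils down to an acquiring load;
// a sketch for an int32 field, with hypothetical registers and offset:
//   add  x16, x1, #16    // LDAR has no offset form, so the address is
//   ldar w0, [x16]       // materialized first; no trailing DMB is needed.
// The non-volatile path is a plain LDR inside the EmissionCheckScope so that
// the recorded PC really is the PC of the potentially faulting load.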

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
  } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  DataType::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(
          instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check= */ true);
    } else {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}
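
// Sketch of the volatile reference-store path above with heap poisoning
// disabled, using hypothetical registers (x1 = object, w0 = value):
//   add  x16, x1, #offset   // STLR, like LDAR, has no offset form.
//   stlr w0, [x16]          // Release store; no trailing DMB is needed.
// followed by the MarkGCCard sequence whenever the stored value may be a
// non-null reference.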

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in a register of the same size as the
          // result. If we are rotating a long and the shift distance originally
          // comes in a W register, no SXTW is needed to use it as an X register,
          // since rotate distances are always taken modulo the register width
          // (i.e. masked with `reg_bits - 1`).
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else if (instr->IsMin() || instr->IsMax()) {
        __ Cmp(lhs, rhs);
        __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      VRegister dst = OutputFPRegister(instr);
      VRegister lhs = InputFPRegisterAt(instr, 0);
      VRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else if (instr->IsMin()) {
        __ Fmin(dst, lhs, rhs);
      } else if (instr->IsMax()) {
        __ Fmax(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
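
// Example of the rotate handling above: for `Long.rotateRight(x, n)` with a
// register distance, a single
//   ror x0, x1, x2
// is emitted (RORV at the encoding level), and the CPU itself reduces the
// distance modulo 64, which is why no SXTW of the W-sized distance is needed.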

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  DataType::Type type = instr->GetType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.GetImmediate() &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
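
// The immediate masking above mirrors Java shift semantics, where distances
// are taken modulo the operand width. E.g. for `long x << 67`, shift_value
// becomes 67 & kMaxLongShiftDistance == 3 and a single
//   lsl x0, x1, #3
// is emitted; register distances rely on LSLV/ASRV/LSRV masking the low
// bits in hardware.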

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
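
// The register-only constraint in the locations builder above exists because
// BIC/ORN/EON have no immediate forms. The resulting mapping is:
//   a & ~b  ->  bic w0, w1, w2
//   a | ~b  ->  orn w0, w1, w2
//   a ^ ~b  ->  eon w0, w1, w2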

void LocationsBuilderARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
         instruction->GetType() == DataType::Type::kInt64);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DataType::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  Operand right_operand(0);

  HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg,
                            helpers::ShiftFromOpKind(op_kind),
                            instruction->GetShiftAmount());
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still cope if one were passed, by
  // generating the extension as a separate instruction.
  // `HNeg` does not support extension either. See the comments in
  // `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
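
// A sketch of the merging this visitor exploits: a graph shape like
//   t = b << 5; res = a + t;
// reaches here as one HDataProcWithShifterOp and is emitted as a single
//   add w0, w1, w2, lsl #5
// instead of a separate LSL plus ADD. Extension operands (sxtb, uxth, ...)
// are folded the same way, but, per the DCHECK above, only for ADD and SUB.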

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);

  HIntConstant* shift = instruction->GetShift()->AsIntConstant();

  locations->SetInAt(0, Location::RequiresRegister());
  // For the byte case we do not need to shift the index variable, so we can encode the data
  // offset directly into the ADD instruction. For the other cases we prefer the data offset
  // to be in a register: that hoists the materialization of the data offset constant out of
  // the loop and shortens the critical path inside the loop.
  locations->SetInAt(1, shift->GetValue() == 0
                            ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
                            : Location::RequiresRegister());
  locations->SetInAt(2, Location::ConstantLocation(shift));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  Register index_reg = InputRegisterAt(instruction, 0);
  uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
  uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();

  if (shift == 0) {
    __ Add(OutputRegister(instruction), index_reg, offset);
  } else {
    Register offset_reg = InputRegisterAt(instruction, 1);
    __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
  }
}
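
// Example of the two shapes above, with hypothetical registers:
//   shift == 0 (byte-sized elements):
//     add w0, w_index, #data_offset            // Offset folded into ADD.
//   shift != 0:
//     add w0, w_offset, w_index, lsl #shift    // Offset kept in a register.
// Keeping the offset in a register in the shifted case is what lets its
// materialization be hoisted out of the enclosing loop.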

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == DataType::Type::kInt64 &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
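
// Sketch of the erratum 835769 workaround above: when the previously emitted
// instruction is a memory operation, the sequence becomes e.g.
//   ldr  x2, [x3]
//   nop                     // Break the load/store + multiply-accumulate
//   madd x0, x0, x1, x4     // pairing that misbehaves on affected A53s.
// Only 64-bit multiply-accumulates are affected, hence the kInt64 guard.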

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    if (instruction->GetIndex()->IsConstant()) {
      // Array loads with constant index are treated as field loads.
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
      uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
      offset += index << DataType::SizeShift(DataType::Type::kReference);
      if (offset >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(FixedTempLocation());
      }
    } else if (!instruction->GetArray()->IsIntermediateAddress()) {
      // We need a non-scratch temporary for the array data pointer in
      // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier() for the case with no
      // intermediate address.
      locations->AddTemp(Location::RequiresRegister());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // The non-Baker read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == DataType::Type::kReference) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier &&
           !kUseBakerReadBarrier));

  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
    if (index.IsConstant()) {
      DCHECK(!instruction->GetArray()->IsIntermediateAddress());
      // Array load with a constant index can be treated as a field load.
      offset += Int64FromLocation(index) << DataType::SizeShift(type);
      Location maybe_temp =
          (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj.W(),
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check= */ false,
                                                      /* use_load_acquire= */ false);
    } else {
      codegen_->GenerateArrayLoadWithBakerReadBarrier(
          instruction, out, obj.W(), offset, index, /* needs_null_check= */ false);
    }
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    Register length;
    if (maybe_compressed_char_at) {
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      length = temps.AcquireW();
      {
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);

        if (instruction->GetArray()->IsIntermediateAddress()) {
          DCHECK_LT(count_offset, offset);
          int64_t adjusted_offset =
              static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
          // Note that `adjusted_offset` is negative, so this will be a LDUR.
          __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
        } else {
          __ Ldr(length, HeapOperand(obj, count_offset));
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }
    if (index.IsConstant()) {
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + Int64FromLocation(index)));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
        __ Bind(&done);
      } else {
        offset += Int64FromLocation(index) << DataType::SizeShift(type);
        source = HeapOperand(obj, offset);
      }
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 0));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 1));
        __ Bind(&done);
      } else {
        source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
      }
    }
    if (!maybe_compressed_char_at) {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(type, OutputCPURegister(instruction), source);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }

    if (type == DataType::Type::kReference) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}
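
// Sketch of the compressed-string CharAt path above (constant-index case),
// with hypothetical registers; `data` stands for the array data offset:
//   ldr  w16, [x0, #count_offset]      // Length word; bit 0 is the flag.
//   tbnz w16, #0, uncompressed         // 0 = compressed, 1 = uncompressed.
//   ldrb w1, [x0, #(data + index)]     // 8-bit Latin-1 element.
//   b    done
// uncompressed:
//   ldrh w1, [x0, #(data + 2 * index)] // 16-bit UTF-16 element.
// done: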

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl::aarch64::Register out = OutputRegister(instruction);
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out.W(), out.W(), 1u);
  }
}
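
// With string compression, the count word packs the character count in bits
// 31:1 and the compression flag in bit 0, so the
//   lsr w0, w0, #1
// above recovers the length for compressed and uncompressed strings alike.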

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();

  if (!needs_write_barrier) {
    DCHECK(!needs_type_check);
    if (index.IsConstant()) {
      offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                DataType::SizeShift(value_type));
    }
    {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(value_type, value, destination);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());

    bool can_value_be_null = instruction->GetValueCanBeNull();
    vixl::aarch64::Label do_store;
    if (can_value_be_null) {
      __ Cbz(Register(value), &do_store);
    }

    SlowPathCodeARM64* slow_path = nullptr;
    if (needs_type_check) {
      slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
      codegen_->AddSlowPath(slow_path);

      const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      Register temp2 = temps.AcquireSameSizeAs(array);

      // Note that when Baker read barriers are enabled, the type
      // checks are performed without read barriers. This is fine,
      // even in the case where a class object is in the from-space
      // after the flip, as a comparison involving such a type would
      // not produce a false positive; it may of course produce a
      // false negative, in which case we would take the ArraySet
      // slow path.

      // /* HeapReference<Class> */ temp = array->klass_
      {
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        __ Ldr(temp, HeapOperand(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      GetAssembler()->MaybeUnpoisonHeapReference(temp);

      // /* HeapReference<Class> */ temp = temp->component_type_
      __ Ldr(temp, HeapOperand(temp, component_offset));
      // /* HeapReference<Class> */ temp2 = value->klass_
      __ Ldr(temp2, HeapOperand(Register(value), class_offset));
      // If heap poisoning is enabled, no need to unpoison `temp`
      // nor `temp2`, as we are comparing two poisoned references.
      __ Cmp(temp, temp2);

      if (instruction->StaticTypeOfArrayIsObjectArray()) {
        vixl::aarch64::Label do_put;
        __ B(eq, &do_put);
        // If heap poisoning is enabled, the `temp` reference has
        // not been unpoisoned yet; unpoison it now.
        GetAssembler()->MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->super_class_
        __ Ldr(temp, HeapOperand(temp, super_offset));
        // If heap poisoning is enabled, no need to unpoison
        // `temp`, as we are comparing against null below.
        __ Cbnz(temp, slow_path->GetEntryLabel());
        __ Bind(&do_put);
      } else {
        __ B(ne, slow_path->GetEntryLabel());
      }
    }

    codegen_->MarkGCCard(array, value.W(), /* value_can_be_null= */ false);

    if (can_value_be_null) {
      DCHECK(do_store.IsLinked());
      __ Bind(&do_store);
    }

    UseScratchRegisterScope temps(masm);
    if (kPoisonHeapReferences) {
      Register temp_source = temps.AcquireSameSizeAs(array);
      DCHECK(value.IsW());
      __ Mov(temp_source, value.W());
      GetAssembler()->PoisonHeapReference(temp_source);
      source = temp_source;
    }

    if (index.IsConstant()) {
      offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      Register temp_base = temps.AcquireSameSizeAs(array);
      __ Add(temp_base, array, offset);
      destination = HeapOperand(temp_base,
                                XRegisterFrom(index),
                                LSL,
                                DataType::SizeShift(value_type));
    }

    {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      __ Str(source, destination);

      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
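
// Note on the `__ Cmp(temp, temp2)` in the type check above: with heap
// poisoning, both loaded class references are still poisoned, and since
// poisoning applies the same reversible transformation to every reference,
// comparing the poisoned values for equality is equivalent to comparing the
// unpoisoned ones. Only the super-class walk needs a real pointer, hence the
// single MaybeUnpoisonHeapReference on that path.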

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);

  // If both index and length are constant, we can check the bounds statically and
  // generate code accordingly. We want to make sure we generate constant locations
  // in that case, regardless of whether they are encodable in the comparison or not.
  HInstruction* index = instruction->InputAt(0);
  HInstruction* length = instruction->InputAt(1);
  bool both_const = index->IsConstant() && length->IsConstant();
  locations->SetInAt(0, both_const
                            ? Location::ConstantLocation(index->AsConstant())
                            : ARM64EncodableConstantOrRegister(index, instruction));
  locations->SetInAt(1, both_const
                            ? Location::ConstantLocation(length->AsConstant())
                            : ARM64EncodableConstantOrRegister(length, instruction));
}
2789
2790void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Georgia Kouvelibe530852019-01-17 10:46:41 +00002791 LocationSummary* locations = instruction->GetLocations();
2792 Location index_loc = locations->InAt(0);
2793 Location length_loc = locations->InAt(1);
2794
2795 int cmp_first_input = 0;
2796 int cmp_second_input = 1;
2797 Condition cond = hs;
2798
2799 if (index_loc.IsConstant()) {
2800 int64_t index = Int64FromLocation(index_loc);
2801 if (length_loc.IsConstant()) {
2802 int64_t length = Int64FromLocation(length_loc);
2803 if (index < 0 || index >= length) {
2804 BoundsCheckSlowPathARM64* slow_path =
2805 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
2806 codegen_->AddSlowPath(slow_path);
2807 __ B(slow_path->GetEntryLabel());
2808 } else {
2809 // BCE will remove the bounds check if we are guaranteed to pass.
2810 // However, some optimization after BCE may have generated this, and we should not
2811 // generate a bounds check if it is a valid range.
2812 }
2813 return;
2814 }
2815 // Only the index is constant: change the order of the operands and commute the condition
2816 // so we can use an immediate constant for the index (only the second input to a cmp
2817 // instruction can be an immediate).
2818 cmp_first_input = 1;
2819 cmp_second_input = 0;
2820 cond = ls;
2821 }
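  // Illustrative note (added commentary, not from the original sources): with a
  // constant index, say index == 5, and a non-constant length, the code below
  // emits
  //   cmp wLEN, #5        // wLEN is a placeholder name for the length register
  //   b.ls <slow_path>    // unsigned length <= 5 means index 5 is out of bounds
  // instead of materializing the constant into a register for the first operand.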
  BoundsCheckSlowPathARM64* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
  __ Cmp(InputRegisterAt(instruction, cmp_first_input),
         InputOperandAt(instruction, cmp_second_input));
  codegen_->AddSlowPath(slow_path);
  __ B(slow_path->GetEntryLabel(), cond);
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  VRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  DataType::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  DataType::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
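      // Illustrative note (added commentary, not from the original sources): e.g.
      // left = 3, right = 7: Cmp sets flags for 3 - 7 (lt and ne hold), Cset
      // makes result = 1 (ne), then Cneg flips it to -1 (lt). For equal inputs
      // Cset leaves 0 and the Cneg condition fails, matching the table above.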
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
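      // Illustrative note (added commentary, not from the original sources): an
      // unordered Fcmp result (a NaN operand) sets NZCV to 0011, so `ne` holds
      // and Cset first produces +1; ARM64FPCondition then picks a condition for
      // the Cneg that either includes the unordered case (lt bias, NaN => -1) or
      // excludes it (gt bias, NaN => +1), matching Java's fcmpl/fcmpg semantics.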
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);

  if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                            \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION
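// Illustrative note (added commentary, not from the original sources): each entry,
// e.g. M(Equal), expands to the visitor pair
//   void LocationsBuilderARM64::VisitEqual(HEqual* comp) { HandleCondition(comp); }
//   void InstructionCodeGeneratorARM64::VisitEqual(HEqual* comp) { HandleCondition(comp); }
// so all ten comparison instructions share the two HandleCondition implementations.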

void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);

  if (abs_imm == 2) {
    int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
    __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
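    // Illustrative note (added commentary, not from the original sources): the
    // LSR term extracts the sign bit, so this adds 1 to negative dividends only;
    // e.g. dividend = -3 becomes -3 + 1 = -2, and the Asr by 1 below then yields
    // -1, which is -3 / 2 rounded toward zero as Java division requires.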
  } else {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
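    // Illustrative note (added commentary, not from the original sources): only
    // negative dividends keep the abs_imm - 1 bias, so the arithmetic shift below
    // rounds toward zero; e.g. dividend = -5, imm = 8: Csel picks -5 + 7 = 2 and
    // 2 >> 3 = 0 = -5 / 8, while dividend = 5 stays unbiased and 5 >> 3 = 0.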
  }

  int ctz_imm = CTZ(abs_imm);
  if (imm > 0) {
    __ Asr(out, out, ctz_imm);
  } else {
    __ Neg(out, Operand(out, ASR, ctz_imm));
  }
}

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(
      imm, /* is_long= */ type == DataType::Type::kInt64, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == DataType::Type::kInt64) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

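  // Worked example (added commentary, not from the original sources), assuming
  // CalculateMagicAndShiftForDivRem returns the classic Hacker's Delight pair for
  // int32 and imm = 3 (magic = 0x55555556, shift = 0): for dividend = -7, the
  // Smull/Lsr sequence leaves temp = high32(-7 * 0x55555556) = -3; neither the
  // add/sub adjustment nor the shift applies, and the final Sub below computes
  // -3 - (-3 >> 31) = -3 + 1 = -2, which is -7 / 3 rounded toward zero.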
  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}

void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));

  if (imm == 0) {
    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    return;
  }

  if (IsPowerOfTwo(AbsOrMin(imm))) {
    GenerateIntDivForPower2Denom(instruction);
  } else {
    // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
    DCHECK(imm < -2 || imm > 2) << imm;
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv* instruction) {
  DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
      << instruction->GetResultType();

  if (instruction->GetLocations()->InAt(1).IsConstant()) {
    GenerateIntDivForConstDenom(instruction);
  } else {
    Register out = OutputRegister(instruction);
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    __ Sdiv(out, dividend, divisor);
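    // Illustrative note (added commentary, not from the original sources): AArch64
    // SDIV never traps; x / 0 yields 0 and INT_MIN / -1 wraps to INT_MIN, so
    // division by zero is handled separately by HDivZeroCheck and no extra
    // special-casing is needed here.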
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateIntDiv(div);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    UNREACHABLE();
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64FromLocation(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    DataType::Type type = condition->InputAt(0)->GetType();
    if (DataType::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }
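      // Illustrative note (added commentary, not from the original sources): the
      // special cases below replace the flag-setting Cmp + B.cond pair with a
      // single compare-and-branch (Cbz/Cbnz) or test-bit-and-branch (Tbz/Tbnz)
      // when the comparison is against zero; e.g. `lhs < 0` holds exactly when
      // the sign bit is set, which Tbnz can test directly.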
      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index= */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target= */ nullptr);
}

void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ Ldr(OutputRegister(flag),
         MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;
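    // Illustrative note (added commentary, not from the original sources): VIXL
    // reports here whether each constant operand can be folded into a csel-family
    // instruction (csel/csinc/csinv/csneg) rather than materialized; e.g. a
    // true/false pair like 1 and 0 can typically be synthesized without putting
    // either constant in a register.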

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (DataType::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}

void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}

void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorARM64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

// Temp is used for read barrier.
static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
  if (kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
    return 1;
  }
  return 0;
}

// Interface case has 3 temps, one for holding the number of interfaces, one for the current
// interface pointer, one for loading the current interface.
// The other checks have one temp for loading the object's class.
static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    return 3;
  }
  return 1 + NumberOfInstanceOfTemps(type_check_kind);
}

void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // Add temps if necessary for read barriers.
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}

void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      vixl::aarch64::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
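      // Illustrative note (added commentary, not from the original sources): a
      // non-zero primitive_type field marks a primitive array such as int[],
      // which can never be an instance of a reference array type, so the result
      // is forced to 0 via the `zero` label.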
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003726 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003727 __ Mov(out, 1);
3728 __ B(&done);
3729 break;
3730 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003731
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003732 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003733 // No read barrier since the slow path will retry upon failure.
3734 // /* HeapReference<Class> */ out = obj->klass_
3735 GenerateReferenceLoadTwoRegisters(instruction,
3736 out_loc,
3737 obj_loc,
3738 class_offset,
3739 maybe_temp_loc,
3740 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003741 __ Cmp(out, cls);
3742 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003743 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
Andreas Gampe3db70682018-12-26 15:12:03 -08003744 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003745 codegen_->AddSlowPath(slow_path);
3746 __ B(ne, slow_path->GetEntryLabel());
3747 __ Mov(out, 1);
3748 if (zero.IsLinked()) {
3749 __ B(&done);
3750 }
3751 break;
3752 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003753
Calin Juravle98893e12015-10-02 21:05:03 +01003754 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003755 case TypeCheckKind::kInterfaceCheck: {
3756 // Note that we indeed only call on slow path, but we always go
3757 // into the slow path for the unresolved and interface check
3758 // cases.
3759 //
3760 // We cannot directly call the InstanceofNonTrivial runtime
3761 // entry point without resorting to a type checking slow path
3762 // here (i.e. by calling InvokeRuntime directly), as it would
3763 // require to assign fixed registers for the inputs of this
3764 // HInstanceOf instruction (following the runtime calling
3765 // convention), which might be cluttered by the potential first
3766 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003767 //
3768 // TODO: Introduce a new runtime entry point taking the object
3769 // to test (instead of its class) as argument, and let it deal
3770 // with the read barrier issues. This will let us refactor this
3771 // case of the `switch` code as it was previously (with a direct
3772 // call to the runtime not using a type checking slow path).
3773 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003774 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003775 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
Andreas Gampe3db70682018-12-26 15:12:03 -08003776 instruction, /* is_fatal= */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003777 codegen_->AddSlowPath(slow_path);
3778 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003779 if (zero.IsLinked()) {
3780 __ B(&done);
3781 }
3782 break;
3783 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003784
3785 case TypeCheckKind::kBitstringCheck: {
3786 // /* HeapReference<Class> */ temp = obj->klass_
3787 GenerateReferenceLoadTwoRegisters(instruction,
3788 out_loc,
3789 obj_loc,
3790 class_offset,
3791 maybe_temp_loc,
3792 kWithoutReadBarrier);
3793
3794 GenerateBitstringTypeCheckCompare(instruction, out);
3795 __ Cset(out, eq);
3796 if (zero.IsLinked()) {
3797 __ B(&done);
3798 }
3799 break;
3800 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003801 }
3802
3803 if (zero.IsLinked()) {
3804 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003805 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003806 }
3807
3808 if (done.IsLinked()) {
3809 __ Bind(&done);
3810 }
3811
3812 if (slow_path != nullptr) {
3813 __ Bind(slow_path->GetExitLabel());
3814 }
3815}
3816
void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 3u);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeARM64* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, compare classes.
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is null, jump to the slow path to throw the exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array. Further check that this component type is not a
      // primitive type.
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    case TypeCheckKind::kInterfaceCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
      // Loop through the iftable and check if any class matches.
      vixl::aarch64::Label start_loop;
      __ Bind(&start_loop);
      __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
      __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
      // Go to next interface.
      __ Add(temp, temp, 2 * kHeapReferenceSize);
      __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
      // Compare the classes and continue the loop if they do not match.
      __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
      __ B(ne, &start_loop);
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

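// A condensed view of the kInterfaceCheck loop above (pseudocode using the
// temp names from this function; a sketch, not the emitted sequence):
//
//   temp  = obj->klass_->iftable_;
//   count = temp->length_;                  // Two slots per interface.
//   for (;;) {
//     if (count == 0) goto slow_path;       // Exhausted: throw via slow path.
//     candidate = temp[data_offset];        // Interface class of this pair.
//     temp += 2 * kHeapReferenceSize;       // Also skips the methods slot.
//     count -= 2;
//     if (candidate == cls) break;          // Match: the cast succeeds.
//   }
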
void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void CodeGeneratorARM64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                       Register klass) {
  DCHECK_EQ(klass.GetCode(), 0u);
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
    uint64_t address = reinterpret_cast64<uint64_t>(cache);
    vixl::aarch64::Label done;
    __ Mov(x8, address);
    __ Ldr(x9, MemOperand(x8, InlineCache::ClassesOffset().Int32Value()));
    // Fast path for a monomorphic cache.
    __ Cmp(klass, x9);
    __ B(eq, &done);
    InvokeRuntime(kQuickUpdateInlineCache, instruction, instruction->GetDexPc());
    __ Bind(&done);
  }
}

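// Rough shape of the fast path emitted above (x8/x9 are the scratch
// registers hard-coded by this function; `klass` is expected in register
// code 0; illustrative only, not the literal encoding):
//
//   mov  x8, #<InlineCache* address>
//   ldr  x9, [x8, #classes_offset]      // First cached class.
//   cmp  x0, x9
//   b.eq done                           // Monomorphic hit: skip the runtime.
//   <call the UpdateInlineCache entrypoint>
// done:
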
void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    {
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      // /* HeapReference<Class> */ temp = temp->klass_
      __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
      codegen_->MaybeRecordImplicitNullCheck(invoke);
    }
  } else {
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
    codegen_->MaybeRecordImplicitNullCheck(invoke);
  }

  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());

  // If we're compiling baseline, update the inline cache.
  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  __ Ldr(temp,
      MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));

  {
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);

    // lr();
    __ blr(lr);
    DCHECK(!codegen_->IsLeafMethod());
    codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

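// Interface dispatch above, condensed (illustrative; offsets resolved at
// compile time from the mirror:: layouts):
//
//   temp = receiver->klass_                 // Implicit null check recorded.
//   ip1  = dex method index                 // Hidden argument for the IMT
//                                           // conflict trampoline.
//   temp = temp->imt_                       // ImtPtrOffset.
//   temp = temp[imt_index]                  // ImTable::OffsetOfElement.
//   lr   = temp->entry_point_from_quick_compiled_code_
//   blr  lr                                 // PC recorded right after.
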
void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      // Add ADRP with its PC-relative method patch.
      vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add ADD with its PC-relative method patch.
      vixl::aarch64::Label* add_label =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
      EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Add ADRP with its PC-relative .bss entry patch.
      MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
      vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          NewMethodBssEntryPatch(target_method, adrp_label);
      // All aligned loads are implicitly atomic consume operations on ARM64.
      EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        __ bl(&frame_entry_label_);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        // lr()
        __ blr(lr);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
  }

  DCHECK(!IsLeafMethod());
}

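// All PC-relative method-load kinds above share one two-instruction shape,
// fixed up at link time (a sketch for kBootImageLinkTimePcRelative):
//
//   adrp xN, <4KiB page of the target>   // Patched via the ADRP label.
//   add  xN, xN, #<offset within page>   // Patched via the second label.
//
// kBootImageRelRo and kBssEntry swap the ADD for an LDR at the patched
// page+offset, loading the entry's contents instead of forming an address.
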
void CodeGeneratorARM64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  DCHECK(receiver.IsRegister());

  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
    MaybeRecordImplicitNullCheck(invoke);
  }
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());

  // If we're compiling baseline, update the inline cache.
  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  {
    // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    // lr();
    __ blr(lr);
    RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
  }
}

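// Virtual dispatch above, condensed (illustrative):
//
//   temp = receiver->klass_                 // Implicit null check recorded.
//   temp = temp->vtable_[vtable_index]      // EmbeddedVTableEntryOffset.
//   lr   = temp->entry_point_from_quick_compiled_code_
//   blr  lr                                 // PC recorded right after.
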
void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
    uint32_t intrinsic_data,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file= */ nullptr, intrinsic_data, adrp_label, &boot_image_other_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
    uint32_t boot_image_offset,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file= */ nullptr, boot_image_offset, adrp_label, &boot_image_other_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
}

void CodeGeneratorARM64::EmitEntrypointThunkCall(ThreadOffset64 entrypoint_offset) {
  DCHECK(!__ AllowMacroInstructions());  // In ExactAssemblyScope.
  DCHECK(!Runtime::Current()->UseJitCompilation());
  call_entrypoint_patches_.emplace_back(/*dex_file*/ nullptr, entrypoint_offset.Uint32Value());
  vixl::aarch64::Label* bl_label = &call_entrypoint_patches_.back().label;
  __ bind(bl_label);
  __ bl(static_cast<int64_t>(0));  // Placeholder, patched at link-time.
}

void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
  DCHECK(!__ AllowMacroInstructions());  // In ExactAssemblyScope.
  if (Runtime::Current()->UseJitCompilation()) {
    auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
    vixl::aarch64::Label* slow_path_entry = &it->second.label;
    __ cbnz(mr, slow_path_entry);
  } else {
    baker_read_barrier_patches_.emplace_back(custom_data);
    vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
    __ bind(cbnz_label);
    __ cbnz(mr, static_cast<int64_t>(0));  // Placeholder, patched at link-time.
  }
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
    const DexFile* dex_file,
    uint32_t offset_or_index,
    vixl::aarch64::Label* adrp_label,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::aarch64::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}

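// Typical use of the patch factories above: a first call with a null
// adrp_label creates the anchor (ADRP) patch and returns its label; passing
// that label into a second call ties the companion ADD/LDR patch to the
// same anchor instruction. For example (this mirrors the code in
// GenerateStaticOrDirectCall):
//
//   vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(target_method);
//   EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
//   vixl::aarch64::Label* add_label =
//       NewBootImageMethodPatch(target_method, adrp_label);
//   EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
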
vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
    uint64_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
    const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
  ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
    const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u); });
}

void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
                                             vixl::aarch64::Register reg) {
  DCHECK(reg.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
}

void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
                                            vixl::aarch64::Register out,
                                            vixl::aarch64::Register base) {
  DCHECK(out.IsX());
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ add(out, base, Operand(/* offset placeholder */ 0));
}

void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
                                                  vixl::aarch64::Register out,
                                                  vixl::aarch64::Register base) {
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
}

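// Each placeholder helper above binds its fixup label inside a
// SingleEmissionCheckScope, so exactly one instruction sits at the patched
// location; the zero offsets written here are never meant to execute
// unpatched.
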
void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
                                              uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // Add ADRP with its PC-relative intrinsic patch.
    vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add ADD with its PC-relative intrinsic patch.
    vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
    EmitAddPlaceholder(add_label, reg.X(), reg.X());
  } else if (GetCompilerOptions().GetCompilePic()) {
    // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add LDR with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
    EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
  }
}

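// The three branches above, by configuration:
//   boot image compiles -> ADRP/ADD forming the image-relative address;
//   other PIC compiles  -> ADRP/LDR through a .data.bimg.rel.ro entry;
//   JIT                 -> a literal pool constant, since the boot image is
//                          already mapped at a known address.
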
void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
                                                      uint32_t boot_image_offset) {
  DCHECK(invoke->IsStatic());
  InvokeRuntimeCallingConvention calling_convention;
  Register argument = calling_convention.GetRegisterAt(0);
  if (GetCompilerOptions().IsBootImage()) {
    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    MethodReference target_method = invoke->GetTargetMethod();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    // Add ADRP with its PC-relative type patch.
    vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
    EmitAdrpPlaceholder(adrp_label, argument.X());
    // Add ADD with its PC-relative type patch.
    vixl::aarch64::Label* add_label =
        NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
    EmitAddPlaceholder(add_label, argument.X(), argument.X());
  } else {
    LoadBootImageAddress(argument, boot_image_offset);
  }
  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}

template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    linker_patches->push_back(Factory(info.label.GetLocation(),
                                      info.target_dex_file,
                                      info.pc_insn_label->GetLocation(),
                                      info.offset_or_index));
  }
}

template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_other_patches_.size() +
      call_entrypoint_patches_.size() +
      baker_read_barrier_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  for (const PatchInfo<vixl::aarch64::Label>& info : call_entrypoint_patches_) {
    DCHECK(info.target_dex_file == nullptr);
    linker_patches->push_back(linker::LinkerPatch::CallEntrypointPatch(
        info.label.GetLocation(), info.offset_or_index));
  }
  for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
    linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
        info.label.GetLocation(), info.custom_data));
  }
  DCHECK_EQ(size, linker_patches->size());
}

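// The reserve()/DCHECK_EQ pair in EmitLinkerPatches pins down the contract
// that every patch container is emitted exactly once; a new patch kind must
// be added both to the size computation and to one of the emission paths.
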
bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
  return patch.GetType() == linker::LinkerPatch::Type::kCallEntrypoint ||
         patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
         patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
}

void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
                                       /*out*/ ArenaVector<uint8_t>* code,
                                       /*out*/ std::string* debug_name) {
  Arm64Assembler assembler(GetGraph()->GetAllocator());
  switch (patch.GetType()) {
    case linker::LinkerPatch::Type::kCallRelative: {
      // The thunk just uses the entry point in the ArtMethod. This works even for calls
      // to the generic JNI and interpreter trampolines.
      Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
          kArm64PointerSize).Int32Value());
      assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
      if (GetCompilerOptions().GenerateAnyDebugInfo()) {
        *debug_name = "MethodCallThunk";
      }
      break;
    }
    case linker::LinkerPatch::Type::kCallEntrypoint: {
      Offset offset(patch.EntrypointOffset());
      assembler.JumpTo(ManagedRegister(arm64::TR), offset, ManagedRegister(arm64::IP0));
      if (GetCompilerOptions().GenerateAnyDebugInfo()) {
        *debug_name = "EntrypointCallThunk_" + std::to_string(offset.Uint32Value());
      }
      break;
    }
    case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
      DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
      CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected patch type " << patch.GetType();
      UNREACHABLE();
  }

  // Ensure we emit the literal pool if any.
  assembler.FinalizeCode();
  code->resize(assembler.CodeSize());
  MemoryRegion code_region(code->data(), code->size());
  assembler.FinalizeInstructions(code_region);
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
  return uint32_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
}

void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
    return;
  }

  {
    // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
    // are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
    LocationSummary* locations = invoke->GetLocations();
    codegen_->GenerateStaticOrDirectCall(
        invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
    return;
  }

  {
    // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
    // are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
    codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
    DCHECK(!codegen_->IsLeafMethod());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}

Alexandre Rames67555f72014-11-18 10:55:16 +00004746void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004747 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004748 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004749 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004750 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004751 cls,
4752 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004753 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004754 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004755 return;
4756 }
Vladimir Marko41559982017-01-06 14:04:23 +00004757 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004758
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004759 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4760 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004761 ? LocationSummary::kCallOnSlowPath
4762 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01004763 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004764 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004765 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004766 }
4767
Vladimir Marko41559982017-01-06 14:04:23 +00004768 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004769 locations->SetInAt(0, Location::RequiresRegister());
4770 }
4771 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004772 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4773 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4774 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004775 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004776 } else {
4777 // For non-Baker read barrier we have a temp-clobbering call.
4778 }
4779 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004780}
4781
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004782// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4783// move.
4784void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004785 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004786 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00004787 codegen_->GenerateLoadClassRuntimeCall(cls);
Andreas Gampe3db70682018-12-26 15:12:03 -08004788 codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01004789 return;
4790 }
Vladimir Marko41559982017-01-06 14:04:23 +00004791 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004792
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004793 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004794 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004795
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004796 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4797 ? kWithoutReadBarrier
4798 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004799 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004800 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004801 case HLoadClass::LoadKind::kReferrersClass: {
4802 DCHECK(!cls->CanCallRuntime());
4803 DCHECK(!cls->MustGenerateClinitCheck());
4804 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4805 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00004806 codegen_->GenerateGcRootFieldLoad(cls,
4807 out_loc,
4808 current_method,
4809 ArtMethod::DeclaringClassOffset().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08004810 /* fixup_label= */ nullptr,
Vladimir Markoca1e0382018-04-11 09:58:41 +00004811 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004812 break;
4813 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004814 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01004815 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
4816 codegen_->GetCompilerOptions().IsBootImageExtension());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004817 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004818 // Add ADRP with its PC-relative type patch.
4819 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004820 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004821 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004822 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004823 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004824 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004825 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004826 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004827 break;
4828 }
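    // A minimal sketch of what the two placeholders above are expected to
    // expand to once the linker fills them in (assumed, for illustration; the
    // authoritative encoding is in EmitAdrpPlaceholder()/EmitAddPlaceholder()):
    //   adrp xN, <type page>           ; xN = 4KiB page of the Class object
    //   add  xN, xN, #<page offset>    ; xN = exact Class address
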
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
      break;
    }
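    // Same ADRP trick as above, but the low half is an LDR from the
    // .data.bimg.rel.ro slot rather than an ADD (illustrative sketch):
    //   adrp xN, <slot page>           ; page of the boot image .rel.ro entry
    //   ldr  wN, [xN, #<page offset>]  ; load the 32-bit boot image reference
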
    case HLoadClass::LoadKind::kBssEntry: {
      // Add ADRP with its PC-relative Class .bss entry patch.
      const DexFile& dex_file = cls->GetDexFile();
      dex::TypeIndex type_index = cls->GetTypeIndex();
      vixl::aarch64::Register temp = XRegisterFrom(out_loc);
      vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
      // Add LDR with its PC-relative Class .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
      // All aligned loads are implicitly atomic consume operations on ARM64.
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        temp,
                                        /* offset placeholder */ 0u,
                                        ldr_label,
                                        read_barrier_option);
      generate_null_check = true;
      break;
    }
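    // Note: the Class .bss slot starts out null, so generate_null_check makes
    // the code after this switch emit a CBZ into LoadClassSlowPathARM64; the
    // slow path resolves the type and writes it back to the slot, keeping
    // subsequent executions on the fast path.
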
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                       cls->GetTypeIndex(),
                                                       cls->GetClass()));
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        out.X(),
                                        /* offset= */ 0,
                                        /* fixup_label= */ nullptr,
                                        read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  bool do_clinit = cls->MustGenerateClinitCheck();
  if (generate_null_check || do_clinit) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeARM64* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
  }
}

void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  InvokeRuntimeCallingConvention calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}

void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
  InvokeRuntimeCallingConvention calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
}
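
// Illustrative only: since TR is the dedicated thread register, the two
// exception visitors below each compile to a single memory access against
// this thread-local slot, roughly
//   ldr wN,  [tr, #<exception offset>]   ; VisitLoadException
//   str wzr, [tr, #<exception offset>]   ; VisitClearException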

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}

HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}

// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  Register out = OutputRegister(load);
  Location out_loc = load->GetLocations()->Out();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      const dex::StringIndex string_index = load->GetStringIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add ADD with its PC-relative String patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
      codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Add ADRP with its PC-relative String .bss entry patch.
      const DexFile& dex_file = load->GetDexFile();
      const dex::StringIndex string_index = load->GetStringIndex();
      Register temp = XRegisterFrom(out_loc);
      vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
      // Add LDR with its PC-relative String .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
      // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
      // All aligned loads are implicitly atomic consume operations on ARM64.
      codegen_->GenerateGcRootFieldLoad(load,
                                        out_loc,
                                        temp,
                                        /* offset placeholder */ 0u,
                                        ldr_label,
                                        kCompilerReadBarrierOption);
      SlowPathCodeARM64* slow_path =
          new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
      codegen_->AddSlowPath(slow_path);
      __ Cbz(out.X(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                        load->GetStringIndex(),
                                                        load->GetString()));
      codegen_->GenerateGcRootFieldLoad(load,
                                        out_loc,
                                        out.X(),
                                        /* offset= */ 0,
                                        /* fixup_label= */ nullptr,
                                        kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
  __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
}
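
// BooleanNot relies on the invariant that its input is already 0 or 1, so a
// single EOR with 1 flips it (0 ^ 1 == 1, 1 ^ 1 == 0). A bitwise MVN would
// instead map 1 to 0xfffffffe, which is why it is not used here.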

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  {
    // Ensure that between load and RecordPcInfo there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    Location obj = instruction->GetLocations()->InAt(0);
    __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
    RecordPcInfo(instruction, instruction->GetDexPc());
  }
}
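
// The implicit check above is just `ldr wzr, [obj]`: a null obj makes the
// load fault, and the fault handler uses the PC recorded immediately after
// the load to map the signal back to this null check and throw
// NullPointerException. That is also why the EmissionCheckScope must keep
// literal pools from landing between the load and RecordPcInfo.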

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                          : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);

  if (abs_imm == 2) {
    __ Cmp(dividend, 0);
    __ And(out, dividend, 1);
    __ Csneg(out, out, out, ge);
  } else {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);

    __ Negs(temp, dividend);
    __ And(out, dividend, abs_imm - 1);
    __ And(temp, temp, abs_imm - 1);
    __ Csneg(out, out, temp, mi);
  }
}
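
// Worked example for the general branch above (added for clarity), with
// imm = 8 (mask 7) and dividend = -13:
//   negs temp, dividend       ; temp = 13, flags from 13, so "mi" is false
//   and  out, dividend, 7     ; out  = -13 & 7 = 3 (wrong sign for Java %)
//   and  temp, temp, 7        ; temp = 13 & 7 = 5
//   csneg out, out, temp, mi  ; out = (dividend > 0) ? out : -temp = -5
// giving -13 % 8 == -5, matching Java's round-toward-zero remainder.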

void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));

  if (imm == 0) {
    // Do not generate anything.
    // DivZeroCheck would prevent any code from being executed.
    return;
  }

  if (IsPowerOfTwo(AbsOrMin(imm))) {
    // The cases imm == -1 and imm == 1 are handled in constant folding by
    // InstructionWithAbsorbingInputSimplifier.
    // If they survive until code generation, they are handled in
    // GenerateIntRemForPower2Denom because -1 and 1 are powers of 2 (2^0).
    // Correct code is generated for them, just with more instructions.
    GenerateIntRemForPower2Denom(instruction);
  } else {
    DCHECK(imm < -2 || imm > 2) << imm;
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
  DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
      << instruction->GetResultType();

  if (instruction->GetLocations()->InAt(1).IsConstant()) {
    GenerateIntRemForConstDenom(instruction);
  } else {
    Register out = OutputRegister(instruction);
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);
    __ Sdiv(temp, dividend, divisor);
    __ Msub(out, temp, divisor, dividend);
  }
}
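
// The non-constant path relies on the identity n % d == n - (n / d) * d,
// with SDIV rounding toward zero as Java requires:
//   sdiv temp, dividend, divisor       ; temp = dividend / divisor
//   msub out, temp, divisor, dividend  ; out  = dividend - temp * divisor
// e.g. dividend = 7, divisor = -2: temp = -3 and out = 7 - (-3 * -2) = 1.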

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateIntRem(rem);
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      QuickEntrypointEnum entrypoint =
          (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
      if (type == DataType::Type::kFloat32) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMin(HMin* min) {
  HandleBinaryOp(min);
}

void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
  HandleBinaryOp(min);
}

void LocationsBuilderARM64::VisitMax(HMax* max) {
  HandleBinaryOp(max);
}

void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
  HandleBinaryOp(max);
}

void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    default:
      LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register in_reg = InputRegisterAt(abs, 0);
      Register out_reg = OutputRegister(abs);
      __ Cmp(in_reg, Operand(0));
      __ Cneg(out_reg, in_reg, lt);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      VRegister in_reg = InputFPRegisterAt(abs, 0);
      VRegister out_reg = OutputFPRegister(abs);
      __ Fabs(out_reg, in_reg);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
  }
}
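
// The integer path is a compare plus conditional negate: CNEG negates the
// input only when it was negative (lt). As with Java's Math.abs, the most
// negative value overflows back to itself: abs(INT32_MIN) == INT32_MIN.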
5509
Igor Murashkind01745e2017-04-05 16:40:31 -07005510void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5511 constructor_fence->SetLocations(nullptr);
5512}
5513
5514void InstructionCodeGeneratorARM64::VisitConstructorFence(
5515 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5516 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5517}
5518
Calin Juravle27df7582015-04-17 19:12:31 +01005519void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5520 memory_barrier->SetLocations(nullptr);
5521}
5522
5523void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005524 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005525}
5526
Alexandre Rames5319def2014-10-23 10:03:10 +01005527void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005528 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005529 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005530 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005531}
5532
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005533void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005534 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005535}
5536
5537void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5538 instruction->SetLocations(nullptr);
5539}
5540
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005541void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005542 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005543}
5544
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005545void LocationsBuilderARM64::VisitRor(HRor* ror) {
5546 HandleBinaryOp(ror);
5547}
5548
5549void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5550 HandleBinaryOp(ror);
5551}
5552
Serban Constantinescu02164b32014-11-13 14:05:07 +00005553void LocationsBuilderARM64::VisitShl(HShl* shl) {
5554 HandleShift(shl);
5555}
5556
5557void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5558 HandleShift(shl);
5559}
5560
5561void LocationsBuilderARM64::VisitShr(HShr* shr) {
5562 HandleShift(shr);
5563}
5564
5565void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5566 HandleShift(shr);
5567}
5568
Alexandre Rames5319def2014-10-23 10:03:10 +01005569void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005570 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005571}
5572
5573void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005574 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005575}
5576
Alexandre Rames67555f72014-11-18 10:55:16 +00005577void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005578 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005579}
5580
5581void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005582 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005583}
5584
5585void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005586 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005587}
5588
Alexandre Rames67555f72014-11-18 10:55:16 +00005589void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005590 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005591}
5592
Vladimir Marko552a1342017-10-31 10:56:47 +00005593void LocationsBuilderARM64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5594 codegen_->CreateStringBuilderAppendLocations(instruction, LocationFrom(x0));
5595}
5596
5597void InstructionCodeGeneratorARM64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5598 __ Mov(w0, instruction->GetFormat()->GetValue());
5599 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5600}
5601
void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type input_type = conversion->GetInputType();
  DataType::Type result_type = conversion->GetResultType();
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;
  if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
      (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (DataType::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (DataType::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

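// A few concrete mappings of the dispatch below, worked out from the
// Mov/Ubfx/Sbfx selection logic:
//   int64  -> int32:   Mov(w_out, w_src)          (truncation is implicit in W regs)
//   int32  -> int8:    Sbfx(w_out, w_src, 0, 8)   (sign-extend the low byte)
//   int32  -> uint16:  Ubfx(w_out, w_src, 0, 16)  (zero-extend, e.g. for char)
//   int32  -> float:   Scvtf(s_out, w_src)
//   double -> int64:   Fcvtzs(x_out, d_src)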
void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    int result_size = DataType::Size(result_type);
    int input_size = DataType::Size(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (DataType::IsUnsignedType(result_type) ||
               (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly assume a maximum average of 16 instructions generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/-1MB), so we set a threshold for the number of HIRs in the graph to
  // make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
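  // With the constants above, this threshold works out to 1 MiB / 64 B = 16384
  // HIR instructions.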

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

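    // The sequence emitted below is, roughly (with a leading `sub` to remove
    // the bias when lower_bound != 0):
    //   cmp w_index, #num_entries
    //   b.hs default
    //   adr x_table, table_start
    //   ldr w_offset, [x_table, w_index, uxtw #2]
    //   add x_target, x_table, w_offset, sxtw
    //   br  x_target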
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below use at most one scratch register; since two VIXL
    // scratch registers are available, we are free to acquire one here.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate for Adr does
    // not require any scratch registers, so we are free to use both VIXL
    // scratch registers here to reduce register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check= */ false,
                                                      /* use_load_acquire= */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

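// Unlike GenerateReferenceLoadOneRegister() above, which uses `out` as both the
// base and the destination (and therefore has to save the original reference
// for the slow path), the variant below reads from a separate base `obj`.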
void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check= */ false,
                                                      /* use_load_acquire= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void CodeGeneratorARM64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    Register obj,
    uint32_t offset,
    vixl::aarch64::Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  DCHECK(fixup_label == nullptr || offset == 0u);
  Register root_reg = RegisterFrom(root, DataType::Type::kReference);
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used.

      // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
      // the Marking Register) to decide whether we need to enter
      // the slow path to mark the GC root.
      //
      // We use shared thunks for the slow path; shared within the method
      // for JIT, across methods for AOT. That thunk checks the reference
      // and jumps to the entrypoint if needed.
      //
      //     lr = &return_address;
      //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
      //     if (mr) {  // Thread::Current()->GetIsGcMarking()
      //       goto gc_root_thunk<root_reg>(lr)
      //     }
      //   return_address:

      UseScratchRegisterScope temps(GetVIXLAssembler());
      DCHECK(temps.IsAvailable(ip0));
      DCHECK(temps.IsAvailable(ip1));
      temps.Exclude(ip0, ip1);
      uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());

      ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
      vixl::aarch64::Label return_address;
      __ adr(lr, &return_address);
      if (fixup_label != nullptr) {
        __ bind(fixup_label);
      }
      static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
                    "GC root LDR must be 2 instructions (8B) before the return address label.");
      __ ldr(root_reg, MemOperand(obj.X(), offset));
      EmitBakerReadBarrierCbnz(custom_data);
      __ bind(&return_address);
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
      }
      // /* mirror::Object* */ root = root->Read()
      GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
  MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
}

void CodeGeneratorARM64::GenerateUnsafeCasOldValueMovWithBakerReadBarrier(
    vixl::aarch64::Register marked,
    vixl::aarch64::Register old_value) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Similar to the Baker RB path in GenerateGcRootFieldLoad(), with a MOV instead of LDR.
  uint32_t custom_data = EncodeBakerReadBarrierGcRootData(marked.GetCode());

  ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
  vixl::aarch64::Label return_address;
  __ adr(lr, &return_address);
  static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
                "GC root LDR must be 2 instructions (8B) before the return address label.");
  __ mov(marked, old_value);
  EmitBakerReadBarrierCbnz(custom_data);
  __ bind(&return_address);
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::aarch64::Register obj,
                                                               const vixl::aarch64::MemOperand& src,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  // We use shared thunks for the slow path; shared within the method
  // for JIT, across methods for AOT. That thunk checks the holder
  // and jumps to the entrypoint if needed. If the holder is not gray,
  // it creates a fake dependency and returns to the LDR instruction.
  //
  //     lr = &gray_return_address;
  //     if (mr) {  // Thread::Current()->GetIsGcMarking()
  //       goto field_thunk<holder_reg, base_reg, use_load_acquire>(lr)
  //     }
  //   not_gray_return_address:
  //     // Original reference load. If the offset is too large to fit
  //     // into LDR, we use an adjusted base register here.
  //     HeapReference<mirror::Object> reference = *(obj+offset);
  //   gray_return_address:

  DCHECK(src.GetAddrMode() == vixl::aarch64::Offset);
  DCHECK_ALIGNED(src.GetOffset(), sizeof(mirror::HeapReference<mirror::Object>));

  UseScratchRegisterScope temps(GetVIXLAssembler());
  DCHECK(temps.IsAvailable(ip0));
  DCHECK(temps.IsAvailable(ip1));
  temps.Exclude(ip0, ip1);
  uint32_t custom_data = use_load_acquire
      ? EncodeBakerReadBarrierAcquireData(src.GetBaseRegister().GetCode(), obj.GetCode())
      : EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode());

  {
    ExactAssemblyScope guard(GetVIXLAssembler(),
                             (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
    vixl::aarch64::Label return_address;
    __ adr(lr, &return_address);
    EmitBakerReadBarrierCbnz(custom_data);
    static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                  "Field LDR must be 1 instruction (4B) before the return address label; "
                  " 2 instructions (8B) for heap poisoning.");
    Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
    if (use_load_acquire) {
      DCHECK_EQ(src.GetOffset(), 0);
      __ ldar(ref_reg, src);
    } else {
      __ ldr(ref_reg, src);
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
    // macro instructions disallowed in ExactAssemblyScope.
    if (kPoisonHeapReferences) {
      __ neg(ref_reg, Operand(ref_reg));
    }
    __ bind(&return_address);
  }
  MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__, /* temp_loc= */ LocationFrom(ip1));
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Location maybe_temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
  Register base = obj;
  if (use_load_acquire) {
    DCHECK(maybe_temp.IsRegister());
    base = WRegisterFrom(maybe_temp);
    __ Add(base, obj, offset);
    offset = 0u;
  } else if (offset >= kReferenceLoadMinFarOffset) {
    DCHECK(maybe_temp.IsRegister());
    base = WRegisterFrom(maybe_temp);
    static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
    __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
    offset &= (kReferenceLoadMinFarOffset - 1u);
  }
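  // At this point `offset` is small enough to be encoded in the LDR's unsigned
  // immediate field, which (presumably) is what the Baker field thunk relies on
  // when it decodes the original load instruction at the patched site.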
  MemOperand src(base.X(), offset);
  GenerateFieldLoadWithBakerReadBarrier(
      instruction, ref, obj, src, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HArrayGet* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  // We use shared thunks for the slow path; shared within the method
  // for JIT, across methods for AOT. That thunk checks the holder
  // and jumps to the entrypoint if needed. If the holder is not gray,
  // it creates a fake dependency and returns to the LDR instruction.
  //
  //     lr = &gray_return_address;
  //     if (mr) {  // Thread::Current()->GetIsGcMarking()
  //       goto array_thunk<base_reg>(lr)
  //     }
  //   not_gray_return_address:
  //     // Original reference load. If the offset is too large to fit
  //     // into LDR, we use an adjusted base register here.
  //     HeapReference<mirror::Object> reference = data[index];
  //   gray_return_address:

  DCHECK(index.IsValid());
  Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
  Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  DCHECK(temps.IsAvailable(ip0));
  DCHECK(temps.IsAvailable(ip1));
  temps.Exclude(ip0, ip1);

  Register temp;
  if (instruction->GetArray()->IsIntermediateAddress()) {
    // We do not need to compute the intermediate address from the array: the
    // input instruction has done it already. See the comment in
    // `TryExtractArrayAccessAddress()`.
    if (kIsDebugBuild) {
      HIntermediateAddress* interm_addr = instruction->GetArray()->AsIntermediateAddress();
      DCHECK_EQ(interm_addr->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
    }
    temp = obj;
  } else {
    temp = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
    __ Add(temp.X(), obj.X(), Operand(data_offset));
  }

  uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());

  {
    ExactAssemblyScope guard(GetVIXLAssembler(),
                             (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
    vixl::aarch64::Label return_address;
    __ adr(lr, &return_address);
    EmitBakerReadBarrierCbnz(custom_data);
    static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                  "Array LDR must be 1 instruction (4B) before the return address label; "
                  " 2 instructions (8B) for heap poisoning.");
    __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
    DCHECK(!needs_null_check);  // The thunk cannot handle the null check.
    // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
    // macro instructions disallowed in ExactAssemblyScope.
    if (kPoisonHeapReferences) {
      __ neg(ref_reg, Operand(ref_reg));
    }
    __ bind(&return_address);
  }
  MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__, /* temp_loc= */ LocationFrom(ip1));
}

void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
  // The following condition is a compile-time one, so it does not have a run-time cost.
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
    // The following condition is a run-time one; it is executed after the
    // previous compile-time test, to avoid penalizing non-debug builds.
    if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
      GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
    }
  }
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier and
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
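    // Load the ArtMethod* directly from the class's embedded vtable.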
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
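    // Load the IMT pointer from the class, then the ArtMethod* from the table.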
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

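// Patch the 32-bit literal at `literal_offset` so that it holds the address of
// the root's slot in the `roots_data` table; the JIT-compiled code loads the
// GC root through that slot.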
static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            vixl::aarch64::Literal<uint32_t>* literal,
                            uint64_t index_in_table) {
  uint32_t literal_offset = literal->GetOffset();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

#undef __
#undef QUICK_ENTRY_POINT

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
                                     vixl::aarch64::Register base_reg,
                                     vixl::aarch64::MemOperand& lock_word,
                                     vixl::aarch64::Label* slow_path,
                                     vixl::aarch64::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip0.W(), lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
  static_assert(
      BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
      "Field and array LDR offsets must be the same to reuse the same code.");
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
  static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                "Field LDR must be 1 instruction (4B) before the return address label; "
                " 2 instructions (8B) for heap poisoning.");
  __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
  __ Br(lr);          // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint in register `entrypoint`.
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
                                                       vixl::aarch64::Register entrypoint) {
  // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip0.GetCode(), 16u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                                      uint32_t encoded_data,
                                                      /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField:
    case BakerReadBarrierKind::kAcquire: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      auto holder_reg =
          Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      // In the case of a field load (with relaxed semantic), if `base_reg` differs from
      // `holder_reg`, the offset was too large and we must have emitted (during the construction
      // of the HIR graph, see `art::HInstructionBuilder::BuildInstanceFieldAccess`) and preserved
      // (see `art::PrepareForRegisterAllocation::VisitNullCheck`) an explicit null check before
      // the load. Otherwise, for implicit null checks, we need to null-check the holder as we do
      // not necessarily do that check before going to the thunk.
      //
      // In the case of a field load with load-acquire semantics (where `base_reg` always differs
      // from `holder_reg`), we also need an explicit null check when implicit null checks are
      // allowed, as we do not emit one before going to the thunk.
      vixl::aarch64::Label throw_npe_label;
      vixl::aarch64::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() &&
          (holder_reg.Is(base_reg) || (kind == BakerReadBarrierKind::kAcquire))) {
        throw_npe = &throw_npe_label;
        __ Cbz(holder_reg.W(), throw_npe);
      }
      // Check if the holder is gray and, if not, add fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl::aarch64::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
      __ Bind(&slow_path);
      if (kind == BakerReadBarrierKind::kField) {
        MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
        __ Ldr(ip0.W(), ldr_address);         // Load the LDR (immediate) unsigned offset.
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
        __ Ubfx(ip0.W(), ip0.W(), 10, 12);    // Extract the offset.
        __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2));   // Load the reference.
      } else {
        DCHECK(kind == BakerReadBarrierKind::kAcquire);
        DCHECK(!base_reg.Is(holder_reg));
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
        __ Ldar(ip0.W(), MemOperand(base_reg));
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Br(ip1);                           // Jump to the entrypoint.
      break;
    }
    case BakerReadBarrierKind::kArray: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffset(), 0);
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);         // Load the LDR (register) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0, ip0, 16, 6);             // Extract the index register, plus 32 (bit 21 is set).
      __ Bfi(ip1, ip0, 3, 6);               // Insert ip0 to the entrypoint address to create
                                            // a switch case target based on the index register.
      __ Mov(ip0, base_reg);                // Move the base register to ip0.
      __ Br(ip1);                           // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      auto root_reg =
          Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label return_label, not_marked, forwarding_address;
      __ Cbz(root_reg, &return_label);
      MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip0.W(), lock_word);
      __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
      __ Bind(&return_label);
      __ Br(lr);
      __ Bind(&not_marked);
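      // A lock word in the forwarding address state has its two most significant
      // bits set, so TST of the value against itself shifted left by one sets
      // the N flag (bit 31 AND bit 30) exactly in that case.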
      __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
      __ B(&forwarding_address, mi);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
      // art_quick_read_barrier_mark_introspection_gc_roots.
      __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
      __ Mov(ip0.W(), root_reg);
      __ Br(ip1);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
      __ Br(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`. Tests may not have a runtime.
  DCHECK(Runtime::Current() == nullptr ||
         !Runtime::Current()->UseJitCompilation() ||
         debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kAcquire:
        oss << "Acquire_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm64
}  // namespace art