/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

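// Convention used throughout the ART code generators: within the scope of the
// definition below, '__' expands to the active VIXL macro assembler, so
// '__ Mov(...)' emits instructions through it.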
#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow path that falls back to calling the managed implementation of an intrinsified call.
// It copies the arguments into the positions expected by a regular call.
//
// Note: the actual parameters are required to be in the locations given by the invoke's location
// summary. If an intrinsic modifies those locations before a slow-path call, it must restore
// them!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

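// The bit-cast intrinsics (Double.doubleToRawLongBits/longBitsToDouble and
// Float.floatToRawIntBits/intBitsToFloat) below reduce to a single Fmov
// between a core and an FP register of matching width; the bits are copied
// unchanged.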
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

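// Byte reversal: Rev reverses all bytes of a W or X register, which directly
// implements Integer/Long.reverseBytes. For Short.reverseBytes, Rev16 swaps
// the two low bytes and Sxth then sign-extends the 16-bit result to 32 bits.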
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

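// Integer abs via compare-and-conditionally-negate: Cneg writes -in when the
// condition (lt, i.e. in < 0) holds and in otherwise. Like Java's Math.abs,
// this leaves Integer.MIN_VALUE/Long.MIN_VALUE unchanged on overflow.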
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

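// FP min/max map directly to Fmin/Fmax, whose IEEE semantics (NaN-propagating,
// and treating -0.0 as smaller than +0.0) line up with Java's Math.min/max for
// floating-point arguments.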
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true,
              GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false,
              GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true,
              GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false,
              GetVIXLAssembler());
}

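// Integer min/max via compare-and-conditional-select: Csel picks op1 when the
// condition holds (lt for min, gt for max) and op2 otherwise; on equality the
// two operands are interchangeable.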
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

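// Math.round is computed as floor(x + 0.5): Fmov materializes the 0.5 bias
// (encodable as an FP immediate), Fadd applies it, and Fcvtms converts to a
// signed integer rounding towards minus infinity. The FP temporary comes from
// the VIXL scratch pool rather than a LocationSummary temp.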
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

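// Unsafe.get*: volatile loads use one of two strategies depending on the CPU
// features. When acquire/release is preferred, a single load-acquire provides
// the ordering; otherwise a plain load is followed by a read barrier
// (DMB ISHLD). Non-volatile accesses are plain loads.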
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

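// Unsafe.put*: with acquire/release preferred, volatile and ordered stores are
// emitted as a single store-release. Otherwise a full barrier (DMB ISH)
// precedes a plain store, and volatile stores append a trailing DMB ISHLD.
// Reference stores also mark the GC card of the holding object.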
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt,
               /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt,
               /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt,
               /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot,
               /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot,
               /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot,
               /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong,
               /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong,
               /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong,
               /* is_volatile */ true, /* is_ordered */ false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

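// Compare-and-swap via a load-linked/store-conditional loop. With
// acquire/release, Ldaxr/Stlxr provide the ordering; otherwise exclusive
// Ldxr/Stxr are bracketed by explicit DMB barriers. The store-conditional
// fails (and the loop retries) if another observer touched the location
// between the paired load and store.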
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

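// String.charAt is emitted inline: the index is bounds-checked against the
// string's count field, and an out-of-range index branches to the intrinsic
// slow path, which calls the managed implementation (and so throws the
// expected exception).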
void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go into the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, it is probably not
  //       worth the cost.
  // TODO: For simplicity, the index parameter is requested in a register; unlike Quick, we
  //       therefore do not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

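// String.compareTo is not inlined; after an explicit null check on the
// argument (the receiver was null-checked earlier), it calls the
// pStringCompareTo runtime entry point, following the runtime calling
// convention set up in the LocationSummary below.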
void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

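// String.indexOf dispatches to the pIndexOf runtime stub, which only handles
// search characters in the basic multilingual plane; code points above 0xFFFF
// are diverted to the intrinsic slow path. For indexOf without an explicit
// start index, the start-index register is zeroed before the call.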
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF: branch to the slow path after a runtime check when the
  // value is not known statically, or dispatch to it directly for an out-of-range constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

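// The String factory intrinsics below call the pAllocStringFrom* entry points
// under the runtime calling convention. The byte[] and String sources are
// null-checked inline, branching to the intrinsic slow path; the char[]
// variant performs no inline null check.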
void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr,
                 QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

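// An empty visitor pair leaves the invoke without an intrinsified
// LocationSummary, so TryDispatch() above returns false and the compiler
// falls back to emitting a regular call for these methods.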
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm64
}  // namespace art