| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1 | /* | 
 | 2 |  * Copyright (C) 2015 The Android Open Source Project | 
 | 3 |  * | 
 | 4 |  * Licensed under the Apache License, Version 2.0 (the "License"); | 
 | 5 |  * you may not use this file except in compliance with the License. | 
 | 6 |  * You may obtain a copy of the License at | 
 | 7 |  * | 
 | 8 |  *      http://www.apache.org/licenses/LICENSE-2.0 | 
 | 9 |  * | 
 | 10 |  * Unless required by applicable law or agreed to in writing, software | 
 | 11 |  * distributed under the License is distributed on an "AS IS" BASIS, | 
 | 12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 
 | 13 |  * See the License for the specific language governing permissions and | 
 | 14 |  * limitations under the License. | 
 | 15 |  */ | 
 | 16 |  | 
 | 17 | #include "intrinsics_arm.h" | 
 | 18 |  | 
 | 19 | #include "arch/arm/instruction_set_features_arm.h" | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 20 | #include "art_method.h" | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 21 | #include "code_generator_arm.h" | 
 | 22 | #include "entrypoints/quick/quick_entrypoints.h" | 
 | 23 | #include "intrinsics.h" | 
| Andreas Gampe | 85b62f2 | 2015-09-09 13:15:38 -0700 | [diff] [blame] | 24 | #include "intrinsics_utils.h" | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 25 | #include "mirror/array-inl.h" | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 26 | #include "mirror/string.h" | 
 | 27 | #include "thread.h" | 
 | 28 | #include "utils/arm/assembler_arm.h" | 
 | 29 |  | 
 | 30 | namespace art { | 
 | 31 |  | 
 | 32 | namespace arm { | 
 | 33 |  | 
// Returns the assembler of the wrapped ARM code generator.
ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}
 | 37 |  | 
// Returns the arena allocator of the graph currently being compiled.
ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
 | 41 |  | 
| Andreas Gampe | 85b62f2 | 2015-09-09 13:15:38 -0700 | [diff] [blame] | 42 | using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>; | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 43 |  | 
 | 44 | bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) { | 
 | 45 |   Dispatch(invoke); | 
 | 46 |   LocationSummary* res = invoke->GetLocations(); | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 47 |   if (res == nullptr) { | 
 | 48 |     return false; | 
 | 49 |   } | 
 | 50 |   if (kEmitCompilerReadBarrier && res->CanCall()) { | 
 | 51 |     // Generating an intrinsic for this HInvoke may produce an | 
 | 52 |     // IntrinsicSlowPathARM slow path.  Currently this approach | 
 | 53 |     // does not work when using read barriers, as the emitted | 
 | 54 |     // calling sequence will make use of another slow path | 
 | 55 |     // (ReadBarrierForRootSlowPathARM for HInvokeStaticOrDirect, | 
 | 56 |     // ReadBarrierSlowPathARM for HInvokeVirtual).  So we bail | 
 | 57 |     // out in this case. | 
 | 58 |     // | 
 | 59 |     // TODO: Find a way to have intrinsics work with read barriers. | 
 | 60 |     invoke->SetLocations(nullptr); | 
 | 61 |     return false; | 
 | 62 |   } | 
 | 63 |   return res->Intrinsified(); | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 64 | } | 
 | 65 |  | 
 | 66 | #define __ assembler-> | 
 | 67 |  | 
 | 68 | static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 69 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 70 |                                                            LocationSummary::kNoCall, | 
 | 71 |                                                            kIntrinsified); | 
 | 72 |   locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 73 |   locations->SetOut(Location::RequiresRegister()); | 
 | 74 | } | 
 | 75 |  | 
 | 76 | static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 77 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 78 |                                                            LocationSummary::kNoCall, | 
 | 79 |                                                            kIntrinsified); | 
 | 80 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 81 |   locations->SetOut(Location::RequiresFpuRegister()); | 
 | 82 | } | 
 | 83 |  | 
 | 84 | static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { | 
 | 85 |   Location input = locations->InAt(0); | 
 | 86 |   Location output = locations->Out(); | 
 | 87 |   if (is64bit) { | 
 | 88 |     __ vmovrrd(output.AsRegisterPairLow<Register>(), | 
 | 89 |                output.AsRegisterPairHigh<Register>(), | 
 | 90 |                FromLowSToD(input.AsFpuRegisterPairLow<SRegister>())); | 
 | 91 |   } else { | 
 | 92 |     __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>()); | 
 | 93 |   } | 
 | 94 | } | 
 | 95 |  | 
 | 96 | static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { | 
 | 97 |   Location input = locations->InAt(0); | 
 | 98 |   Location output = locations->Out(); | 
 | 99 |   if (is64bit) { | 
 | 100 |     __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()), | 
 | 101 |                input.AsRegisterPairLow<Register>(), | 
 | 102 |                input.AsRegisterPairHigh<Register>()); | 
 | 103 |   } else { | 
 | 104 |     __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>()); | 
 | 105 |   } | 
 | 106 | } | 
 | 107 |  | 
// Double.doubleToRawLongBits / Double.longBitsToDouble: 64-bit bit moves.
void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// Float.floatToRawIntBits / Float.intBitsToFloat: 32-bit bit moves.
void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
 | 135 |  | 
 | 136 | static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 137 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 138 |                                                            LocationSummary::kNoCall, | 
 | 139 |                                                            kIntrinsified); | 
 | 140 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 141 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 142 | } | 
 | 143 |  | 
 | 144 | static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 145 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 146 |                                                            LocationSummary::kNoCall, | 
 | 147 |                                                            kIntrinsified); | 
 | 148 |   locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 149 |   locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
 | 150 | } | 
 | 151 |  | 
// Emits a count-leading-zeros sequence for a 32-bit or 64-bit input using
// the CLZ instruction.
static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    // If the high word is non-zero, its CLZ is the whole answer.
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    // High word was zero: result is 32 + CLZ(low word). Note |out| is
    // clobbered before the branch decides, so it must not alias the inputs.
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}
 | 173 |  | 
// Integer.numberOfLeadingZeros.
void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// Long.numberOfLeadingZeros.
void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The 64-bit sequence writes |out| while still reading the input pair
  // (see GenNumberOfLeadingZeros), so the allocator must not let the
  // output share a register with the input: hence kOutputOverlap.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
 | 193 |  | 
// Emits a count-trailing-zeros sequence: RBIT (bit reverse) followed by CLZ
// turns trailing-zero counting into leading-zero counting.
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    // If the low word is non-zero, CTZ(low word) is the whole answer.
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    // Low word was zero: result is 32 + CTZ(high word). |out| is clobbered
    // while the input pair is still live, so they must not share registers.
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}
 | 218 |  | 
// Integer.numberOfTrailingZeros.
void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// Long.numberOfTrailingZeros.
void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The 64-bit sequence writes |out| while the input pair is still live
  // (see GenNumberOfTrailingZeros), so the output must not share a
  // register with the input: hence kOutputOverlap.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
 | 242 |  | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 243 | static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { | 
 | 244 |   Location in = locations->InAt(0); | 
 | 245 |   Location out = locations->Out(); | 
 | 246 |  | 
 | 247 |   if (is64bit) { | 
 | 248 |     __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), | 
 | 249 |              FromLowSToD(in.AsFpuRegisterPairLow<SRegister>())); | 
 | 250 |   } else { | 
 | 251 |     __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>()); | 
 | 252 |   } | 
 | 253 | } | 
 | 254 |  | 
// Math.abs(double) / Math.abs(float).
void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
 | 270 |  | 
 | 271 | static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 272 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 273 |                                                            LocationSummary::kNoCall, | 
 | 274 |                                                            kIntrinsified); | 
 | 275 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 276 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 277 |  | 
 | 278 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 279 | } | 
 | 280 |  | 
// Emits a branchless abs(x) for int/long using the sign-mask idiom:
//   mask = x >> 31  (arithmetic shift: all-ones if negative, zero otherwise)
//   abs  = (x + mask) ^ mask
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    // out_reg_lo is written (adds) before in_reg_hi is read (adc) below.
    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    // 64-bit add of the sign mask: adds/adc propagate the carry across the
    // register pair, then each half is xor-ed with the mask.
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}
 | 311 |  | 
// Math.abs(int) / Math.abs(long).
void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
 | 328 |  | 
// Emits a 32-bit integer min or max: compare the operands, then use an IT
// block with an "else" slot so exactly one of the two conditional moves
// executes, selecting the winner into |out|.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  // For min: out = (op1 < op2) ? op1 : op2; for max the condition is GT.
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}
 | 342 |  | 
 | 343 | static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 344 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 345 |                                                            LocationSummary::kNoCall, | 
 | 346 |                                                            kIntrinsified); | 
 | 347 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 348 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 349 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 350 | } | 
 | 351 |  | 
// Math.min(int, int) / Math.max(int, int).
void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
 | 367 |  | 
// Math.sqrt(double): a single VSQRT.F64 instruction.
void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}
 | 378 |  | 
// Memory.peekByte(long address): sign-extending byte load from a raw address.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.peekIntNative(long address): 32-bit word load from a raw address.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
 | 400 |  | 
// Memory.peekLongNative(long address): 64-bit load as two word loads.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    // Loading the low word first would clobber the address register, so
    // load the high word first in that case.
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}
 | 421 |  | 
// Memory.peekShortNative(long address): sign-extending halfword load.
void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
 | 432 |  | 
 | 433 | static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
 | 434 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 435 |                                                            LocationSummary::kNoCall, | 
 | 436 |                                                            kIntrinsified); | 
 | 437 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 438 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 439 | } | 
 | 440 |  | 
// Memory.pokeByte(long address, byte value): byte store to a raw address.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

// Memory.pokeIntNative(long address, int value): word store to a raw address.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
 | 460 |  | 
// Memory.pokeLongNative(long address, long value): 64-bit store as two word
// stores.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}
 | 474 |  | 
// Memory.pokeShortNative(long address, short value): halfword store.
void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
 | 484 |  | 
// Thread.currentThread(): loads the thread's peer object from the Thread
// structure pointed to by the dedicated thread register (TR).
void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}
 | 499 |  | 
// Emits an Unsafe.get-style load of |type| from |base| + |offset| (only the
// low 32 bits of the long offset are used). Volatile accesses get a trailing
// DMB barrier; object loads may additionally get a read barrier.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  if (type == Primitive::kPrimLong) {
    Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      // ldrd is not single-copy atomic on this core; use ldrexd for an
      // atomic 64-bit load instead.
      Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = trg_loc.AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    // Memory barrier after the load for volatile (acquire) semantics.
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MaybeGenerateReadBarrier(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
  }
}
 | 537 |  | 
 | 538 | static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 539 |   bool can_call = kEmitCompilerReadBarrier && | 
 | 540 |       (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject || | 
 | 541 |        invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile); | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 542 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 543 |                                                            can_call ? | 
 | 544 |                                                                LocationSummary::kCallOnSlowPath : | 
 | 545 |                                                                LocationSummary::kNoCall, | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 546 |                                                            kIntrinsified); | 
 | 547 |   locations->SetInAt(0, Location::NoLocation());        // Unused receiver. | 
 | 548 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 549 |   locations->SetInAt(2, Location::RequiresRegister()); | 
 | 550 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 551 | } | 
 | 552 |  | 
// All Unsafe.get*() variants share the same (receiver, object, offset) ->
// register location shape, regardless of type width or volatility, so each
// builder simply delegates to CreateIntIntIntToIntLocations().
void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
 | 571 |  | 
// Code generation for the Unsafe.get*() variants: each visitor forwards to
// GenUnsafeGet() with the compile-time type and volatility baked in.
void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
 | 590 |  | 
 | 591 | static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, | 
 | 592 |                                      const ArmInstructionSetFeatures& features, | 
 | 593 |                                      Primitive::Type type, | 
 | 594 |                                      bool is_volatile, | 
 | 595 |                                      HInvoke* invoke) { | 
 | 596 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 597 |                                                            LocationSummary::kNoCall, | 
 | 598 |                                                            kIntrinsified); | 
 | 599 |   locations->SetInAt(0, Location::NoLocation());        // Unused receiver. | 
 | 600 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 601 |   locations->SetInAt(2, Location::RequiresRegister()); | 
 | 602 |   locations->SetInAt(3, Location::RequiresRegister()); | 
 | 603 |  | 
 | 604 |   if (type == Primitive::kPrimLong) { | 
 | 605 |     // Potentially need temps for ldrexd-strexd loop. | 
 | 606 |     if (is_volatile && !features.HasAtomicLdrdAndStrd()) { | 
 | 607 |       locations->AddTemp(Location::RequiresRegister());  // Temp_lo. | 
 | 608 |       locations->AddTemp(Location::RequiresRegister());  // Temp_hi. | 
 | 609 |     } | 
 | 610 |   } else if (type == Primitive::kPrimNot) { | 
 | 611 |     // Temps for card-marking. | 
 | 612 |     locations->AddTemp(Location::RequiresRegister());  // Temp. | 
 | 613 |     locations->AddTemp(Location::RequiresRegister());  // Card. | 
 | 614 |   } | 
 | 615 | } | 
 | 616 |  | 
// Location setup for the Unsafe.put*() variants. Each builder delegates to
// CreateIntIntIntIntToVoid() with the stored type and volatility; note that
// the "Ordered" variants request non-volatile locations (ordering only
// affects the emitted barriers, not the register requirements).
void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}
 | 647 |  | 
// Emits code for an Unsafe.put*() store of `type` into object(in1) + offset(in2)
// with value in in3. Both volatile and ordered stores get a leading dmb
// (release semantics); only volatile stores get a trailing dmb. Reference
// stores additionally mark the GC card.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  // Release barrier: order prior memory accesses before the store.
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      // No single-copy-atomic 64-bit store available: use an
      // ldrexd/strexd loop to make the store atomic. The ldrexd result is
      // discarded; it only establishes the exclusive monitor.
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      // strexd writes 0 to temp_lo on success; retry on failure.
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      // Poison a copy in a temp so `value` keeps the unpoisoned reference
      // for the card-marking below.
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  // Trailing barrier for volatile stores only (store-load ordering).
  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}
 | 705 |  | 
// Code generation for the Unsafe.put*() variants: each visitor forwards to
// GenUnsafePut() with the type, volatility and ordering baked in.
void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
 | 769 |  | 
 | 770 | static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, | 
 | 771 |                                                 HInvoke* invoke) { | 
 | 772 |   LocationSummary* locations = new (arena) LocationSummary(invoke, | 
 | 773 |                                                            LocationSummary::kNoCall, | 
 | 774 |                                                            kIntrinsified); | 
 | 775 |   locations->SetInAt(0, Location::NoLocation());        // Unused receiver. | 
 | 776 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 777 |   locations->SetInAt(2, Location::RequiresRegister()); | 
 | 778 |   locations->SetInAt(3, Location::RequiresRegister()); | 
 | 779 |   locations->SetInAt(4, Location::RequiresRegister()); | 
 | 780 |  | 
 | 781 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 782 |  | 
 | 783 |   locations->AddTemp(Location::RequiresRegister());  // Pointer. | 
 | 784 |   locations->AddTemp(Location::RequiresRegister());  // Temp 1. | 
 | 785 |   locations->AddTemp(Location::RequiresRegister());  // Temp 2. | 
 | 786 | } | 
 | 787 |  | 
// Emits an ldrex/strex compare-and-swap loop for a 32-bit value (int or
// reference) at object(in1) + offset(in2), comparing against expected(in3)
// and storing new value(in4). Writes 1 to the output register on success,
// 0 on failure. 64-bit CAS is not supported here.
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // Poison both comparison operands so they match the (poisoned) in-memory
  // representation; they are unpoisoned again after the loop.
  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);
  // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  // If the loaded value matched, conditionally try the store; strex writes 1
  // to tmp_lo if it lost the exclusive monitor, in which case we retry.
  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  // out = 1 - tmp_lo, clamped to 0 on borrow: 1 on success, 0 on failure.
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  // Restore the caller-visible (unpoisoned) input registers.
  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}
 | 850 |  | 
| Andreas Gampe | ca71458 | 2015-04-03 19:41:34 -0700 | [diff] [blame] | 851 | void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) { | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 852 |   CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke); | 
 | 853 | } | 
| Andreas Gampe | ca71458 | 2015-04-03 19:41:34 -0700 | [diff] [blame] | 854 | void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) { | 
| Roland Levillain | 985ff70 | 2015-10-23 13:25:35 +0100 | [diff] [blame] | 855 |   // The UnsafeCASObject intrinsic does not always work when heap | 
 | 856 |   // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it | 
 | 857 |   // off temporarily as a quick fix. | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 858 |   // | 
| Roland Levillain | 985ff70 | 2015-10-23 13:25:35 +0100 | [diff] [blame] | 859 |   // TODO(rpl): Fix it and turn it back on. | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 860 |   // | 
 | 861 |   // TODO(rpl): Also, we should investigate whether we need a read | 
 | 862 |   // barrier in the generated code. | 
| Roland Levillain | 985ff70 | 2015-10-23 13:25:35 +0100 | [diff] [blame] | 863 |   if (kPoisonHeapReferences) { | 
 | 864 |     return; | 
 | 865 |   } | 
 | 866 |  | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 867 |   CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke); | 
 | 868 | } | 
// Code generation for the Unsafe CAS intrinsics; both forward to GenCas()
// with the element type baked in.
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}
 | 875 |  | 
 | 876 | void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) { | 
 | 877 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 878 |                                                             LocationSummary::kCallOnSlowPath, | 
 | 879 |                                                             kIntrinsified); | 
 | 880 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 881 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 882 |   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); | 
 | 883 |  | 
 | 884 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 885 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 886 | } | 
 | 887 |  | 
// Code generation for String.charAt(): load the length, bounds-check the
// index (branching to a slow path that throws on failure), then load the
// 16-bit character from the value array.
void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Unsigned (CS) comparison handles negative indices too: they wrap to
  // large unsigned values and take the slow path.
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value. Characters are 16-bit, hence the LSL #1 index scaling.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));                 // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}
 | 924 |  | 
| Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 925 | void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) { | 
 | 926 |   // The inputs plus one temp. | 
 | 927 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 928 |                                                             LocationSummary::kCall, | 
 | 929 |                                                             kIntrinsified); | 
 | 930 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 931 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 932 |   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); | 
 | 933 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 934 | } | 
 | 935 |  | 
// Code generation for String.compareTo(): null-check the argument (slow path
// throws NPE), then tail out to the pStringCompareTo runtime entry point,
// which leaves the result in R0.
void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  // Load the entry point address from the thread register and call it.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
 | 954 |  | 
| Agi Csaki | 289cd55 | 2015-08-18 17:10:38 -0700 | [diff] [blame] | 955 | void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) { | 
 | 956 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 957 |                                                             LocationSummary::kNoCall, | 
 | 958 |                                                             kIntrinsified); | 
 | 959 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 960 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 961 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 962 |   // Temporary registers to store lengths of strings and for calculations. | 
 | 963 |   // Using instruction cbz requires a low register, so explicitly set a temp to be R0. | 
 | 964 |   locations->AddTemp(Location::RegisterLocation(R0)); | 
 | 965 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 966 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 967 |  | 
 | 968 |   locations->SetOut(Location::RequiresRegister()); | 
 | 969 | } | 
 | 970 |  | 
// Code generation for String.equals(): returns false on a null or
// non-String argument or a length mismatch, true for an empty pair or
// identical references, and otherwise compares the character data one
// 32-bit word (two chars) at a time.
void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // temp1 is the running byte offset into both character arrays.
  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  // temp holds the remaining character count; it is decremented by two
  // (chars per word) each iteration.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) /  sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}
 | 1049 |  | 
| Andreas Gampe | ba6fdbc | 2015-05-07 22:31:55 -0700 | [diff] [blame] | 1050 | static void GenerateVisitStringIndexOf(HInvoke* invoke, | 
 | 1051 |                                        ArmAssembler* assembler, | 
 | 1052 |                                        CodeGeneratorARM* codegen, | 
 | 1053 |                                        ArenaAllocator* allocator, | 
 | 1054 |                                        bool start_at_zero) { | 
 | 1055 |   LocationSummary* locations = invoke->GetLocations(); | 
 | 1056 |   Register tmp_reg = locations->GetTemp(0).AsRegister<Register>(); | 
 | 1057 |  | 
 | 1058 |   // Note that the null check must have been done earlier. | 
 | 1059 |   DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); | 
 | 1060 |  | 
 | 1061 |   // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically, | 
 | 1062 |   // or directly dispatch if we have a constant. | 
| Andreas Gampe | 85b62f2 | 2015-09-09 13:15:38 -0700 | [diff] [blame] | 1063 |   SlowPathCode* slow_path = nullptr; | 
| Andreas Gampe | ba6fdbc | 2015-05-07 22:31:55 -0700 | [diff] [blame] | 1064 |   if (invoke->InputAt(1)->IsIntConstant()) { | 
 | 1065 |     if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > | 
 | 1066 |         std::numeric_limits<uint16_t>::max()) { | 
 | 1067 |       // Always needs the slow-path. We could directly dispatch to it, but this case should be | 
 | 1068 |       // rare, so for simplicity just put the full slow-path down and branch unconditionally. | 
 | 1069 |       slow_path = new (allocator) IntrinsicSlowPathARM(invoke); | 
 | 1070 |       codegen->AddSlowPath(slow_path); | 
 | 1071 |       __ b(slow_path->GetEntryLabel()); | 
 | 1072 |       __ Bind(slow_path->GetExitLabel()); | 
 | 1073 |       return; | 
 | 1074 |     } | 
 | 1075 |   } else { | 
 | 1076 |     Register char_reg = locations->InAt(1).AsRegister<Register>(); | 
 | 1077 |     __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max()); | 
 | 1078 |     __ cmp(char_reg, ShifterOperand(tmp_reg)); | 
 | 1079 |     slow_path = new (allocator) IntrinsicSlowPathARM(invoke); | 
 | 1080 |     codegen->AddSlowPath(slow_path); | 
 | 1081 |     __ b(slow_path->GetEntryLabel(), HI); | 
 | 1082 |   } | 
 | 1083 |  | 
 | 1084 |   if (start_at_zero) { | 
 | 1085 |     DCHECK_EQ(tmp_reg, R2); | 
 | 1086 |     // Start-index = 0. | 
 | 1087 |     __ LoadImmediate(tmp_reg, 0); | 
 | 1088 |   } | 
 | 1089 |  | 
 | 1090 |   __ LoadFromOffset(kLoadWord, LR, TR, | 
 | 1091 |                     QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value()); | 
 | 1092 |   __ blx(LR); | 
 | 1093 |  | 
 | 1094 |   if (slow_path != nullptr) { | 
 | 1095 |     __ Bind(slow_path->GetExitLabel()); | 
 | 1096 |   } | 
 | 1097 | } | 
 | 1098 |  | 
 | 1099 | void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) { | 
 | 1100 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 1101 |                                                             LocationSummary::kCall, | 
 | 1102 |                                                             kIntrinsified); | 
 | 1103 |   // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's | 
 | 1104 |   // best to align the inputs accordingly. | 
 | 1105 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 1106 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 1107 |   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); | 
 | 1108 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 1109 |  | 
 | 1110 |   // Need a temp for slow-path codepoint compare, and need to send start-index=0. | 
 | 1111 |   locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2))); | 
 | 1112 | } | 
 | 1113 |  | 
 | 1114 | void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) { | 
| Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 1115 |   GenerateVisitStringIndexOf( | 
 | 1116 |       invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); | 
| Andreas Gampe | ba6fdbc | 2015-05-07 22:31:55 -0700 | [diff] [blame] | 1117 | } | 
 | 1118 |  | 
 | 1119 | void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) { | 
 | 1120 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 1121 |                                                             LocationSummary::kCall, | 
 | 1122 |                                                             kIntrinsified); | 
 | 1123 |   // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's | 
 | 1124 |   // best to align the inputs accordingly. | 
 | 1125 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 1126 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 1127 |   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); | 
 | 1128 |   locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); | 
 | 1129 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 1130 |  | 
 | 1131 |   // Need a temp for slow-path codepoint compare. | 
 | 1132 |   locations->AddTemp(Location::RequiresRegister()); | 
 | 1133 | } | 
 | 1134 |  | 
 | 1135 | void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) { | 
| Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 1136 |   GenerateVisitStringIndexOf( | 
 | 1137 |       invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); | 
| Andreas Gampe | ba6fdbc | 2015-05-07 22:31:55 -0700 | [diff] [blame] | 1138 | } | 
 | 1139 |  | 
| Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1140 | void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) { | 
 | 1141 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 1142 |                                                             LocationSummary::kCall, | 
 | 1143 |                                                             kIntrinsified); | 
 | 1144 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 1145 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 1146 |   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); | 
 | 1147 |   locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); | 
 | 1148 |   locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3))); | 
 | 1149 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 1150 | } | 
 | 1151 |  | 
// Emits the fast path for the String-from-byte[] factory: null-check the
// array, then call the pAllocStringFromBytes quick entrypoint.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Bail to the intrinsic slow path if the byte array is null.
  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  // Call the entrypoint; the arguments already sit in the runtime calling
  // convention registers (see the locations builder).
  // NOTE(review): pc info is recorded at the blx instruction rather than after
  // it — confirm this is the native pc the runtime's stack-map lookup expects.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
 | 1168 |  | 
 | 1169 | void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) { | 
 | 1170 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 1171 |                                                             LocationSummary::kCall, | 
 | 1172 |                                                             kIntrinsified); | 
 | 1173 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 1174 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 1175 |   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); | 
 | 1176 |   locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); | 
 | 1177 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 1178 | } | 
 | 1179 |  | 
// Emits the fast path for the String-from-char[] factory: a straight call to
// the pAllocStringFromChars quick entrypoint with the arguments already in
// the runtime calling convention registers.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // NOTE(review): unlike NewStringFromBytes/NewStringFromString, no null check
  // is emitted on the char array before the call — presumably the entrypoint
  // (or an earlier check) handles null; confirm.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}
 | 1188 |  | 
 | 1189 | void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) { | 
 | 1190 |   LocationSummary* locations = new (arena_) LocationSummary(invoke, | 
 | 1191 |                                                             LocationSummary::kCall, | 
 | 1192 |                                                             kIntrinsified); | 
 | 1193 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 1194 |   locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); | 
 | 1195 |   locations->SetOut(Location::RegisterLocation(R0)); | 
 | 1196 | } | 
 | 1197 |  | 
// Emits the fast path for new String(String): null-check the source string,
// then call the pAllocStringFromString quick entrypoint.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Bail to the intrinsic slow path if the source string is null.
  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  // Call the entrypoint; the argument already sits in the first runtime
  // calling convention register (see the locations builder).
  // NOTE(review): pc info is recorded at the blx instruction rather than after
  // it — confirm this is the native pc the runtime's stack-map lookup expects.
  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
 | 1214 |  | 
| Nicolas Geoffray | 5bd05a5 | 2015-10-13 09:48:30 +0100 | [diff] [blame] | 1215 | void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) { | 
 | 1216 |   CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke); | 
 | 1217 |   LocationSummary* locations = invoke->GetLocations(); | 
 | 1218 |   if (locations == nullptr) { | 
 | 1219 |     return; | 
 | 1220 |   } | 
 | 1221 |  | 
 | 1222 |   HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant(); | 
 | 1223 |   HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant(); | 
 | 1224 |   HIntConstant* length = invoke->InputAt(4)->AsIntConstant(); | 
 | 1225 |  | 
 | 1226 |   if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) { | 
 | 1227 |     locations->SetInAt(1, Location::RequiresRegister()); | 
 | 1228 |   } | 
 | 1229 |   if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) { | 
 | 1230 |     locations->SetInAt(3, Location::RequiresRegister()); | 
 | 1231 |   } | 
 | 1232 |   if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) { | 
 | 1233 |     locations->SetInAt(4, Location::RequiresRegister()); | 
 | 1234 |   } | 
 | 1235 | } | 
 | 1236 |  | 
// Emits the bounds checks for one side of System.arraycopy: verifies that
// pos >= 0 and that [pos, pos + length) fits within the length field of
// `input`, branching to `slow_path` on any violation.
//
// `input_len` and `temp` are scratch registers and are clobbered. When
// `length_is_input_length` is true the caller asserts that `length` equals
// the length of `input`, so the copy can only succeed with pos == 0.
static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      // `subs` leaves length(input) - pos in temp for the next comparison.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    // `subs` leaves length(input) - pos in temp for the next comparison.
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}
 | 1299 |  | 
| Roland Levillain | 3b359c7 | 2015-11-17 19:35:12 +0000 | [diff] [blame] | 1300 | // TODO: Implement read barriers in the SystemArrayCopy intrinsic. | 
 | 1301 | // Note that this code path is not used (yet) because we do not | 
 | 1302 | // intrinsify methods that can go into the IntrinsicSlowPathARM | 
 | 1303 | // slow path. | 
| Nicolas Geoffray | 5bd05a5 | 2015-10-13 09:48:30 +0100 | [diff] [blame] | 1304 | void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) { | 
 | 1305 |   ArmAssembler* assembler = GetAssembler(); | 
 | 1306 |   LocationSummary* locations = invoke->GetLocations(); | 
 | 1307 |  | 
 | 1308 |   uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); | 
 | 1309 |   uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); | 
 | 1310 |   uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); | 
 | 1311 |   uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value(); | 
 | 1312 |  | 
 | 1313 |   Register src = locations->InAt(0).AsRegister<Register>(); | 
 | 1314 |   Location src_pos = locations->InAt(1); | 
 | 1315 |   Register dest = locations->InAt(2).AsRegister<Register>(); | 
 | 1316 |   Location dest_pos = locations->InAt(3); | 
 | 1317 |   Location length = locations->InAt(4); | 
 | 1318 |   Register temp1 = locations->GetTemp(0).AsRegister<Register>(); | 
 | 1319 |   Register temp2 = locations->GetTemp(1).AsRegister<Register>(); | 
 | 1320 |   Register temp3 = locations->GetTemp(2).AsRegister<Register>(); | 
 | 1321 |  | 
 | 1322 |   SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke); | 
 | 1323 |   codegen_->AddSlowPath(slow_path); | 
 | 1324 |  | 
 | 1325 |   Label ok; | 
 | 1326 |   SystemArrayCopyOptimizations optimizations(invoke); | 
 | 1327 |  | 
 | 1328 |   if (!optimizations.GetDestinationIsSource()) { | 
 | 1329 |     if (!src_pos.IsConstant() || !dest_pos.IsConstant()) { | 
 | 1330 |       __ cmp(src, ShifterOperand(dest)); | 
 | 1331 |     } | 
 | 1332 |   } | 
 | 1333 |  | 
 | 1334 |   // If source and destination are the same, we go to slow path if we need to do | 
 | 1335 |   // forward copying. | 
 | 1336 |   if (src_pos.IsConstant()) { | 
 | 1337 |     int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue(); | 
 | 1338 |     if (dest_pos.IsConstant()) { | 
 | 1339 |       // Checked when building locations. | 
 | 1340 |       DCHECK(!optimizations.GetDestinationIsSource() | 
 | 1341 |              || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue())); | 
 | 1342 |     } else { | 
 | 1343 |       if (!optimizations.GetDestinationIsSource()) { | 
 | 1344 |         __ b(&ok, NE); | 
 | 1345 |       } | 
 | 1346 |       __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant)); | 
 | 1347 |       __ b(slow_path->GetEntryLabel(), GT); | 
 | 1348 |     } | 
 | 1349 |   } else { | 
 | 1350 |     if (!optimizations.GetDestinationIsSource()) { | 
 | 1351 |       __ b(&ok, NE); | 
 | 1352 |     } | 
 | 1353 |     if (dest_pos.IsConstant()) { | 
 | 1354 |       int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue(); | 
 | 1355 |       __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant)); | 
 | 1356 |     } else { | 
 | 1357 |       __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>())); | 
 | 1358 |     } | 
 | 1359 |     __ b(slow_path->GetEntryLabel(), LT); | 
 | 1360 |   } | 
 | 1361 |  | 
 | 1362 |   __ Bind(&ok); | 
 | 1363 |  | 
 | 1364 |   if (!optimizations.GetSourceIsNotNull()) { | 
 | 1365 |     // Bail out if the source is null. | 
 | 1366 |     __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel()); | 
 | 1367 |   } | 
 | 1368 |  | 
 | 1369 |   if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) { | 
 | 1370 |     // Bail out if the destination is null. | 
 | 1371 |     __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel()); | 
 | 1372 |   } | 
 | 1373 |  | 
 | 1374 |   // If the length is negative, bail out. | 
 | 1375 |   // We have already checked in the LocationsBuilder for the constant case. | 
 | 1376 |   if (!length.IsConstant() && | 
 | 1377 |       !optimizations.GetCountIsSourceLength() && | 
 | 1378 |       !optimizations.GetCountIsDestinationLength()) { | 
 | 1379 |     __ cmp(length.AsRegister<Register>(), ShifterOperand(0)); | 
 | 1380 |     __ b(slow_path->GetEntryLabel(), LT); | 
 | 1381 |   } | 
 | 1382 |  | 
 | 1383 |   // Validity checks: source. | 
 | 1384 |   CheckPosition(assembler, | 
 | 1385 |                 src_pos, | 
 | 1386 |                 src, | 
 | 1387 |                 length, | 
 | 1388 |                 slow_path, | 
 | 1389 |                 temp1, | 
 | 1390 |                 temp2, | 
 | 1391 |                 optimizations.GetCountIsSourceLength()); | 
 | 1392 |  | 
 | 1393 |   // Validity checks: dest. | 
 | 1394 |   CheckPosition(assembler, | 
 | 1395 |                 dest_pos, | 
 | 1396 |                 dest, | 
 | 1397 |                 length, | 
 | 1398 |                 slow_path, | 
 | 1399 |                 temp1, | 
 | 1400 |                 temp2, | 
 | 1401 |                 optimizations.GetCountIsDestinationLength()); | 
 | 1402 |  | 
 | 1403 |   if (!optimizations.GetDoesNotNeedTypeCheck()) { | 
 | 1404 |     // Check whether all elements of the source array are assignable to the component | 
 | 1405 |     // type of the destination array. We do two checks: the classes are the same, | 
 | 1406 |     // or the destination is Object[]. If none of these checks succeed, we go to the | 
 | 1407 |     // slow path. | 
 | 1408 |     __ LoadFromOffset(kLoadWord, temp1, dest, class_offset); | 
 | 1409 |     __ LoadFromOffset(kLoadWord, temp2, src, class_offset); | 
 | 1410 |     bool did_unpoison = false; | 
 | 1411 |     if (!optimizations.GetDestinationIsNonPrimitiveArray() || | 
 | 1412 |         !optimizations.GetSourceIsNonPrimitiveArray()) { | 
 | 1413 |       // One or two of the references need to be unpoisoned. Unpoisoned them | 
 | 1414 |       // both to make the identity check valid. | 
 | 1415 |       __ MaybeUnpoisonHeapReference(temp1); | 
 | 1416 |       __ MaybeUnpoisonHeapReference(temp2); | 
 | 1417 |       did_unpoison = true; | 
 | 1418 |     } | 
 | 1419 |  | 
 | 1420 |     if (!optimizations.GetDestinationIsNonPrimitiveArray()) { | 
 | 1421 |       // Bail out if the destination is not a non primitive array. | 
 | 1422 |       __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset); | 
 | 1423 |       __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel()); | 
 | 1424 |       __ MaybeUnpoisonHeapReference(temp3); | 
 | 1425 |       __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset); | 
 | 1426 |       static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); | 
 | 1427 |       __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel()); | 
 | 1428 |     } | 
 | 1429 |  | 
 | 1430 |     if (!optimizations.GetSourceIsNonPrimitiveArray()) { | 
 | 1431 |       // Bail out if the source is not a non primitive array. | 
 | 1432 |       // Bail out if the destination is not a non primitive array. | 
 | 1433 |       __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset); | 
 | 1434 |       __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel()); | 
 | 1435 |       __ MaybeUnpoisonHeapReference(temp3); | 
 | 1436 |       __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset); | 
 | 1437 |       static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); | 
 | 1438 |       __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel()); | 
 | 1439 |     } | 
 | 1440 |  | 
 | 1441 |     __ cmp(temp1, ShifterOperand(temp2)); | 
 | 1442 |  | 
 | 1443 |     if (optimizations.GetDestinationIsTypedObjectArray()) { | 
 | 1444 |       Label do_copy; | 
 | 1445 |       __ b(&do_copy, EQ); | 
 | 1446 |       if (!did_unpoison) { | 
 | 1447 |         __ MaybeUnpoisonHeapReference(temp1); | 
 | 1448 |       } | 
 | 1449 |       __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset); | 
 | 1450 |       __ MaybeUnpoisonHeapReference(temp1); | 
 | 1451 |       __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset); | 
 | 1452 |       // No need to unpoison the result, we're comparing against null. | 
 | 1453 |       __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel()); | 
 | 1454 |       __ Bind(&do_copy); | 
 | 1455 |     } else { | 
 | 1456 |       __ b(slow_path->GetEntryLabel(), NE); | 
 | 1457 |     } | 
 | 1458 |   } else if (!optimizations.GetSourceIsNonPrimitiveArray()) { | 
 | 1459 |     DCHECK(optimizations.GetDestinationIsNonPrimitiveArray()); | 
 | 1460 |     // Bail out if the source is not a non primitive array. | 
 | 1461 |     __ LoadFromOffset(kLoadWord, temp1, src, class_offset); | 
 | 1462 |     __ MaybeUnpoisonHeapReference(temp1); | 
 | 1463 |     __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset); | 
 | 1464 |     __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel()); | 
 | 1465 |     __ MaybeUnpoisonHeapReference(temp3); | 
 | 1466 |     __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset); | 
 | 1467 |     static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); | 
 | 1468 |     __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel()); | 
 | 1469 |   } | 
 | 1470 |  | 
 | 1471 |   // Compute base source address, base destination address, and end source address. | 
 | 1472 |  | 
 | 1473 |   uint32_t element_size = sizeof(int32_t); | 
 | 1474 |   uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value(); | 
 | 1475 |   if (src_pos.IsConstant()) { | 
 | 1476 |     int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue(); | 
 | 1477 |     __ AddConstant(temp1, src, element_size * constant + offset); | 
 | 1478 |   } else { | 
 | 1479 |     __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2)); | 
 | 1480 |     __ AddConstant(temp1, offset); | 
 | 1481 |   } | 
 | 1482 |  | 
 | 1483 |   if (dest_pos.IsConstant()) { | 
 | 1484 |     int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue(); | 
 | 1485 |     __ AddConstant(temp2, dest, element_size * constant + offset); | 
 | 1486 |   } else { | 
 | 1487 |     __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2)); | 
 | 1488 |     __ AddConstant(temp2, offset); | 
 | 1489 |   } | 
 | 1490 |  | 
 | 1491 |   if (length.IsConstant()) { | 
 | 1492 |     int32_t constant = length.GetConstant()->AsIntConstant()->GetValue(); | 
 | 1493 |     __ AddConstant(temp3, temp1, element_size * constant); | 
 | 1494 |   } else { | 
 | 1495 |     __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2)); | 
 | 1496 |   } | 
 | 1497 |  | 
 | 1498 |   // Iterate over the arrays and do a raw copy of the objects. We don't need to | 
 | 1499 |   // poison/unpoison, nor do any read barrier as the next uses of the destination | 
 | 1500 |   // array will do it. | 
 | 1501 |   Label loop, done; | 
 | 1502 |   __ cmp(temp1, ShifterOperand(temp3)); | 
 | 1503 |   __ b(&done, EQ); | 
 | 1504 |   __ Bind(&loop); | 
 | 1505 |   __ ldr(IP, Address(temp1, element_size, Address::PostIndex)); | 
 | 1506 |   __ str(IP, Address(temp2, element_size, Address::PostIndex)); | 
 | 1507 |   __ cmp(temp1, ShifterOperand(temp3)); | 
 | 1508 |   __ b(&loop, NE); | 
 | 1509 |   __ Bind(&done); | 
 | 1510 |  | 
 | 1511 |   // We only need one card marking on the destination array. | 
 | 1512 |   codegen_->MarkGCCard(temp1, | 
 | 1513 |                        temp2, | 
 | 1514 |                        dest, | 
 | 1515 |                        Register(kNoRegister), | 
| Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 1516 |                        /* can_be_null */ false); | 
| Nicolas Geoffray | 5bd05a5 | 2015-10-13 09:48:30 +0100 | [diff] [blame] | 1517 |  | 
 | 1518 |   __ Bind(slow_path->GetExitLabel()); | 
 | 1519 | } | 
 | 1520 |  | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1521 | // Unimplemented intrinsics. | 
 | 1522 |  | 
 | 1523 | #define UNIMPLEMENTED_INTRINSIC(Name)                                                  \ | 
 | 1524 | void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ | 
 | 1525 | }                                                                                      \ | 
 | 1526 | void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \ | 
 | 1527 | } | 
 | 1528 |  | 
 | 1529 | UNIMPLEMENTED_INTRINSIC(IntegerReverse) | 
 | 1530 | UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes) | 
| Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame^] | 1531 | UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) | 
 | 1532 | UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1533 | UNIMPLEMENTED_INTRINSIC(LongReverse) | 
 | 1534 | UNIMPLEMENTED_INTRINSIC(LongReverseBytes) | 
| Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame^] | 1535 | UNIMPLEMENTED_INTRINSIC(LongRotateLeft) | 
 | 1536 | UNIMPLEMENTED_INTRINSIC(LongRotateRight) | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1537 | UNIMPLEMENTED_INTRINSIC(ShortReverseBytes) | 
 | 1538 | UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble) | 
 | 1539 | UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat) | 
 | 1540 | UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble) | 
 | 1541 | UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat) | 
 | 1542 | UNIMPLEMENTED_INTRINSIC(MathMinLongLong) | 
 | 1543 | UNIMPLEMENTED_INTRINSIC(MathMaxLongLong) | 
 | 1544 | UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe? | 
 | 1545 | UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe? | 
 | 1546 | UNIMPLEMENTED_INTRINSIC(MathRint) | 
 | 1547 | UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe? | 
 | 1548 | UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe? | 
 | 1549 | UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure. | 
 | 1550 | UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar) | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1551 | UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent) | 
| Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1552 | UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck) | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1553 |  | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1554 | #undef UNIMPLEMENTED_INTRINSIC | 
 | 1555 |  | 
 | 1556 | #undef __ | 
 | 1557 |  | 
| Andreas Gampe | 2bcf9bf | 2015-01-29 09:56:07 -0800 | [diff] [blame] | 1558 | }  // namespace arm | 
 | 1559 | }  // namespace art |