/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
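      // Note: the result lives in R0/R1 and the target pair may overlap it. Copy the
      // low word first unless trg_reg_lo aliases R1; in that case copy the high word
      // first so the first mov does not clobber a value that is still needed.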
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
// summary. If an intrinsic modifies those locations before a slow-path call, they must be
// restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
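    // clz of the high word is the answer whenever any high bit is set; otherwise
    // the count is the whole high word (32) plus clz of the low word.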
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

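  // Branch-free abs via the sign mask: mask = x >> 31 (all ones for negative x,
  // zero otherwise), then |x| = (x + mask) ^ mask. For example, x = -5 gives
  // mask = 0xFFFFFFFF, x + mask = -6, and (-6) ^ mask = 5. The 64-bit variant
  // propagates the +mask addition through the carry with adds/adc.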
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

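  // Thumb-2 IT block with an "else" slot: the first mov executes when the condition
  // (LT for min, GT for max) holds and selects op1; the else mov selects op2.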
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: even with control register bit SCTLR.A = 0 (where plain unaligned loads are
  // allowed), an unaligned ldrd throws a processor exception. So we can't use ldrd, as addr
  // may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
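  // If the low output register aliases addr, load the high word first so the base
  // address is still intact for the second load.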
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: even with control register bit SCTLR.A = 0 (where plain unaligned stores are
  // allowed), an unaligned strd throws a processor exception. So we can't use strd, as addr
  // may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();            // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();   // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
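    // Note: on cores without single-copy-atomic ldrd/strd, a volatile 64-bit load
    // falls back to ldrexd, which is architecturally guaranteed to read both words
    // as one atomic 64-bit access.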
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

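  // For a volatile get, the dmb after the load acts as an acquire barrier: later
  // memory accesses cannot be reordered ahead of the load.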
  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register trg = locations->Out().AsRegister<Register>();
    __ MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();            // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();   // Long offset, lo part only.
  Register value;

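  // Note: both volatile and ordered puts need a barrier before the store (release
  // ordering); a volatile put additionally gets a second barrier after the store below.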
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
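      // ldrexd/strexd loop: strexd writes 0 to temp_lo on success and 1 if the
      // exclusive monitor was lost, so we retry until the 64-bit store lands atomically.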
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

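  // ldrex/strex pair: strex writes 0 to tmp_lo on success and 1 on failure. The IT
  // block predicates the store and the re-check on the compare having matched.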
  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

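  // Materialize the boolean as out = 1 - tmp_lo: success (tmp_lo == 0) yields 1;
  // tmp_lo == 1 yields 0 directly; any other nonzero tmp_lo makes the subtraction
  // borrow, and the CC-predicated mov then clears out to 0.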
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
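  // A single unsigned CS branch covers both idx >= length and negative idx, since a
  // negative index compares as a very large unsigned value.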
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  __ LoadImmediate(temp1, value_offset);

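  // temp1 is the running byte offset into both value arrays; temp holds the remaining
  // length in chars and is decremented by two for every 32-bit word compared.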
  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm
}  // namespace art