/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
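      // Copy the R0/R1 result pair into the target pair without clobbering it: if the target's
      // low register is R1, move the high word first; if it is R0, the target must already be
      // the R0/R1 pair (checked below) and no moves are needed.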
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

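  // Branch-free abs: mask = in >> 31 (arithmetic shift: all ones if negative, zero otherwise),
  // then abs(in) = (in + mask) ^ mask. The 64-bit variant propagates the carry from the low
  // word via adds/adc before the xors.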
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

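  // Conditional select via a Thumb-2 IT block: keep op1 if the comparison chose it,
  // otherwise take op2 in the "else" slot.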
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
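  // If the low output register aliases the address register, load the high word first so the
  // base address is not clobbered before the second load.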
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

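  // A volatile 64-bit load must be single-copy atomic. Without atomic ldrd/strd (i.e. no
  // LPAE), fall back to ldrexd, which is architecturally guaranteed to be atomic.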
  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

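  // The leading barrier makes the store a release for both volatile and ordered (lazySet)
  // puts; volatile puts get an additional trailing barrier after the store below.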
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
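      // The loaded value is discarded; ldrexd only claims exclusive access to the address.
      // strexd writes 0 to temp_lo on success, so retry until the store goes through.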
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen->MarkGCCard(temp, card, base, value);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

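  // At this point tmp_lo is 0 on success and non-zero on an expected-value mismatch.
  // rsbs computes out = 1 - tmp_lo: success yields 1 with no borrow; any other value
  // borrows (carry clear), and the conditional mov then clamps out to 0.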
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
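  // Chars are 16-bit UTF-16 code units, hence the index is scaled by 2 (LSL #1).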
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // Inputs go in the runtime calling convention registers; the result is returned in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
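  // (Such code points encode as surrogate pairs; the pIndexOf entrypoint presumably only
  // matches single 16-bit units, so they must take the managed fallback.)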
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm
}  // namespace art