/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

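// Wraps the raw 64-bit address held in |location| as a MemOperand. Used by the
// Memory.peek/poke intrinsics below, which operate on absolute addresses
// rather than object-plus-offset references.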
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
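    // kDiscardForSameWReg lets the macro-assembler elide the move when the
    // target and the return register are the same W register (assumption:
    // VIXL's DiscardMoveMode semantics).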
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
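      // Rev16 swaps the bytes within each 16-bit halfword; Sxth then
      // sign-extends the reversed halfword so the result is a proper int.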
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

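  // Compare against zero and conditionally negate: out = (in < 0) ? -in : in.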
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
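  // Fmin/Fmax propagate NaN operands and order -0.0 below +0.0, which lines
  // up with the Java Math.min/max contract for floats and doubles.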
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
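  // Fcvtms converts with round-toward-minus-infinity, so the add-then-convert
  // sequence computes floor(in + 0.5), i.e. the Java Math.round contract.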
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
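  // Volatile gets use a load-acquire when the CPU prefers acquire/release;
  // otherwise a plain load followed by a read barrier.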
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

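  // Volatile and ordered puts use a store-release when the CPU prefers
  // acquire/release; otherwise a full barrier before the store, plus a
  // trailing barrier when the put is volatile.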
  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();
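  // tmp_32 receives the exclusive-store status flag: 0 on success, 1 if the
  // exclusive monitor was lost and the loop must retry.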

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go into the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.
  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so, unlike Quick,
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
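  // idx is zero-extended (UXTW) and scaled by 2 to index the 16-bit char array.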
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

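  // Load the pStringCompareTo entrypoint from the current thread (tr holds the
  // Thread pointer) and call it.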
  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.
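// Each expansion below provides empty locations-builder and code-generator
// visitors: the invoke then has no intrinsified LocationSummary, so
// TryDispatch() returns false and a regular call is emitted instead.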

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm64
}  // namespace art