/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
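      // Rev16 byte-swaps each 16-bit halfword; the low halfword must then be
      // sign-extended back to 32 bits, since a Java short is signed.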
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

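  // ARM64 has no count-trailing-zeros instruction, so reverse the bit order
  // and count leading zeros instead: CTZ(x) == CLZ(RBIT(x)).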
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateRight(LocationSummary* locations,
                           Primitive::Type type,
                           vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
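    // Java rotate distances are taken modulo the register width, which the
    // mask below implements for the immediate case.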
    uint32_t shift = rhs.immediate() & (RegisterFrom(in, type).SizeInBits() - 1);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           rhs.reg());
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateLeft(LocationSummary* locations,
                          Primitive::Type type,
                          vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

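  // ARM64 only provides rotate-right (ROR); a rotate-left by n is emitted as
  // a rotate-right by (width - n), i.e. by the negated distance for the
  // register case below.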
  if (rhs.IsImmediate()) {
    uint32_t regsize = RegisterFrom(in, type).SizeInBits();
    uint32_t shift = (regsize - rhs.immediate()) & (regsize - 1);
    __ Ror(RegisterFrom(out, type), RegisterFrom(in, type), shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Neg(RegisterFrom(out, type),
           Operand(RegisterFrom(locations->InAt(1), type)));
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           RegisterFrom(out, type));
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

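  // Branchless abs: compare against zero, then conditionally negate (Cneg)
  // when the input is negative.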
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

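  // Branchless min/max: compare once, then conditionally select the first
  // operand on lt (min) or gt (max).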
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
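  // Math.round(x) is computed as floor(x + 0.5): add the 0.5, then Fcvtms
  // converts to a signed integer rounding towards minus infinity.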
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
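    // A volatile load needs acquire semantics: use a load-acquire where the
    // core prefers it, otherwise a plain load followed by a read barrier.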
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    DCHECK(trg.IsW());
    codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

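    // Volatile and ordered stores need release semantics: either a
    // store-release, or a full barrier before the store (plus a trailing
    // barrier after it for the volatile case).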
    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());               // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));            // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));          // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);   // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);      // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                          // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);          // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    codegen->GetAssembler()->PoisonHeapReference(value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

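  // Exclusive-access loop: Ldaxr/Ldxr marks the address for exclusive access,
  // and Stlxr/Stxr writes 0 to tmp_32 on success (non-zero if the exclusive
  // reservation was lost), so Cbnz retries on contention.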
  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value);
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it
  // off temporarily as a quick fix.
  // TODO(rpl): Fix it and turn it back on.
  if (kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));                 // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
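  // The index is zero-extended and scaled by 2 (UXTW #1), as each Java char
  // is a 16-bit code unit.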
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));       // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
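  // Each iteration compared 8 bytes, i.e. 4 UTF-16 chars; decrement the
  // remaining character count accordingly and loop while any remain.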
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
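  // Code points above 0xFFFF would require surrogate-pair handling, which the
  // assembly stub does not provide, so such searches take the managed path.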
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art