/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
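  // Fmov between a core register and an FP register moves the raw bit pattern without conversion.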
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
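      // Rev16 byte-swaps each halfword; Sxth then sign-extends the reversed 16-bit value.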
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

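  // The trailing-zero count equals the leading-zero count of the bit-reversed value:
  // CTZ(x) == CLZ(RBIT(x)).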
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateRight(LocationSummary* locations,
                           Primitive::Type type,
                           vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t shift = rhs.immediate() & (RegisterFrom(in, type).SizeInBits() - 1);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           rhs.reg());
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateLeft(LocationSummary* locations,
                          Primitive::Type type,
                          vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t regsize = RegisterFrom(in, type).SizeInBits();
    uint32_t shift = (regsize - rhs.immediate()) & (regsize - 1);
    __ Ror(RegisterFrom(out, type), RegisterFrom(in, type), shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
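    // ARM64 has no rotate-left instruction; a rotate left by r is a rotate right by
    // (bitsize - r), i.e. by the negated amount modulo the register size.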
    __ Neg(RegisterFrom(out, type),
           Operand(RegisterFrom(locations->InAt(1), type)));
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           RegisterFrom(out, type));
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

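  // Compare against zero and conditionally negate: out = (in < 0) ? -in : in.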
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

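  // Compare the operands and conditionally select: op1 when it is smaller (min) or larger (max),
  // otherwise op2.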
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
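  // Round half up: add 0.5, then convert to integer rounding toward minus infinity,
  // i.e. floor(in + 0.5).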
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
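  // Volatile reads use a load-acquire when the target prefers acquire/release; otherwise a plain
  // load followed by a read barrier.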
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    DCHECK(trg.IsW());
    codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

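    // Ordered and volatile stores use a store-release when preferred; otherwise a full barrier
    // precedes the store, and volatile stores add a read barrier afterwards.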
    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());               // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));            // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));          // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);   // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);      // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                          // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);          // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    codegen->GetAssembler()->PoisonHeapReference(value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value == 0;

  vixl::Label loop_head, exit_loop;
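  // LL/SC retry loop: the exclusive store fails, and the loop retries, if the reservation was
  // lost; the acquire/release variant uses LDAXR/STLXR, the fallback brackets LDXR/STXR with
  // explicit barriers.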
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value);
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
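  // idx is zero-extended and scaled by 2 (UXTW #1) to index the 16-bit chars of the value array.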
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art